Compare commits


31 Commits

SHA1 Message Date CI
810aff4163 Merge pull request 'feature/artwork_gallery' (#41) from feature/artwork_gallery into experimental (Reviewed-on: #41) 2024-07-15 09:36:21 +00:00 [CI: passed]
5ce76c758e feat: genius fixes and duplicate detection 2024-07-02 17:20:25 +02:00 [CI: passed]
93c9a367a2 feat: image hash implemented 2024-07-01 14:59:51 +02:00 [CI: passed]
17c28722fb feat: musify ArtworkCollection simple function 2024-06-17 14:50:17 +02:00 [CI: passed]
dd99e60afd fix: circular input 2024-06-11 14:58:04 +02:00 [CI: passed]
274f1bce90 feat: implemented fetching of artworks on compile 2024-06-11 14:54:36 +02:00 [CI: passed]
b1a306f3f3 fix: implemented artwork.add_data 2024-06-11 14:34:58 +02:00
4ee6fd2137 feat:a lot of nonsences 2024-06-10 12:23:12 +02:00 [CI: passed]
2da7a48b72 feat: added compile 2024-06-07 11:27:55 +02:00 [CI: passed]
346d273201 feat: added extend 2024-06-07 11:17:47 +02:00 [CI: passed]
eef3ea7f07 feat: removed distracting code 2024-06-07 11:15:23 +02:00 [CI: passed]
01dffc2443 Merge branch 'feature/artwork_gallery' of ssh://gitea.elara.ws:2222/music-kraken/music-kraken-core into feature/artwork_gallery 2024-06-06 17:53:44 +02:00 [CI: passed]
4e50bb1fba draft implemented add_data 2024-06-06 17:53:17 +02:00
8e3ec0f4ed Merge branch 'feature/artwork_gallery' of ssh://gitea.elara.ws:2222/music-kraken/music-kraken-core into feature/artwork_gallery 2024-06-05 13:45:25 +02:00 [CI: passed]
d447b10380 feat: youtube music album and artist artwork 2024-06-05 13:33:18 +02:00
df98a70717 feat: renamed artwork 2024-06-05 12:05:38 +02:00 [CI: passed]
3118140f0f feat: fix saving img in tmp 2024-06-05 09:47:02 +02:00 [CI: passed]
7d23ecac06 feat: bandcamp artist artwork 2024-06-05 08:34:37 +02:00 [CI: passed]
d83e40ed83 feat: config changes 2024-06-04 11:44:48 +02:00 [CI: passed]
d51e3a56fb feat: structure changes to artwork and collection objects 2024-06-04 11:04:00 +02:00 [CI: passed]
05ee09e25f feat: musify completed 2024-06-04 10:58:21 +02:00 [CI: passed]
1ef4b27f28 feat: added album.artwork to datastructure 2024-06-04 10:31:23 +02:00 [CI: passed]
eb8fd5e580 feat: added artist.artwork to data structure 2024-06-04 10:13:34 +02:00
49c3734526 feat: added hooks for collection on append 2024-06-04 10:11:46 +02:00 [CI: passed]
bc19a94e7f feat: added parent artwork options 2024-06-04 10:09:17 +02:00
5d26fdbf94 Artwork gallery Musify 2024-06-04 07:58:18 +02:00 [CI: passed]
465af49057 hotfix 2024-06-03 10:19:32 +02:00 [CI: passed]
2aa0f02fa5 Merge branch 'adding_genius' into experimental 2024-05-23 13:36:10 +02:00 [CI: passed]
7b0b830d64 feat: removed legacy key 2024-05-23 13:24:25 +02:00 [CI: failed, ci/woodpecker/push]
1ba6c97f5a feat: more extensive browse id 2024-05-23 13:20:34 +02:00
c8cbfc7cb9 feat: improved output of clearing the cache 2024-05-23 13:17:14 +02:00
21 changed files with 786 additions and 335 deletions

View File

@@ -20,6 +20,7 @@
     "APIC",
     "Bandcamp",
     "bitrate",
+    "CALLSTACK",
     "DEEZER",
     "dotenv",
     "encyclopaedia",

View File

@@ -1,15 +1,13 @@
-import logging
 import music_kraken
+import logging
 
 print("Setting logging-level to DEBUG")
 logging.getLogger().setLevel(logging.DEBUG)
 
 if __name__ == "__main__":
     commands = [
-        "s: #a Crystal F",
-        "10",
-        "1",
-        "3",
+        "s: #a Ghost Bath",
     ]

View File

@@ -1,21 +1,21 @@
-import mutagen
-from mutagen.id3 import ID3, Frame, APIC, USLT
+import logging
 from pathlib import Path
 from typing import List
-import logging
 
+import mutagen
+from mutagen.id3 import APIC, ID3, USLT, Frame
 from PIL import Image
 
-from ..utils.config import logging_settings, main_settings
-from ..objects import Song, Target, Metadata
-from ..objects.metadata import Mapping
 from ..connection import Connection
+from ..objects import Metadata, Song, Target
+from ..objects.metadata import Mapping
+from ..utils.config import logging_settings, main_settings
 
 LOGGER = logging_settings["tagging_logger"]
 
 artwork_connection: Connection = Connection()
 
 class AudioMetadata:
     def __init__(self, file_location: str = None) -> None:
         self._file_location = None
@@ -67,13 +67,14 @@ def write_metadata_to_target(metadata: Metadata, target: Target, song: Song):
     id3_object = AudioMetadata(file_location=target.file_path)
     LOGGER.info(str(metadata))
 
-    if song.artwork.best_variant is not None:
-        best_variant = song.artwork.best_variant
+    ## REWRITE COMPLETLY !!!!!!!!!!!!
+    if len(song.artwork._data) != 0:
+        variants = song.artwork._data.__getitem__(0)
+        best_variant = variants.variants.__getitem__(0)
 
         r = artwork_connection.get(
-            url=best_variant["url"],
-            name=song.artwork.get_variant_name(best_variant),
+            url=best_variant.url,
+            name=best_variant.url,
         )
 
         temp_target: Target = Target.temp()
@@ -107,7 +108,7 @@ def write_metadata_to_target(metadata: Metadata, target: Target, song: Song):
                 mime="image/jpeg",
                 type=3,
                 desc=u"Cover",
-                data=converted_target.read_bytes(),
+                data=converted_target.raw_content,
             )
         )
 
     id3_object.frames.delall("USLT")

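For reference, the hunk above still embeds the cover through mutagen's APIC frame; only the way the image bytes are obtained changed. A minimal standalone sketch of that embedding pattern (file names are placeholders, not paths the project uses):

```
from mutagen.id3 import ID3, APIC

def embed_cover(mp3_path: str, cover_path: str) -> None:
    # read the already-converted JPEG bytes (the diff reads them via Target.raw_content)
    with open(cover_path, "rb") as f:
        cover_bytes = f.read()

    tags = ID3(mp3_path)
    tags.add(APIC(
        encoding=3,        # UTF-8
        mime="image/jpeg",
        type=3,            # front cover
        desc="Cover",
        data=cover_bytes,
    ))
    tags.save()

embed_cover("song.mp3", "cover.jpg")
```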
View File

@@ -6,6 +6,7 @@ from typing import List, Optional
 from functools import lru_cache
 import logging
 
+from ..utils import output, BColors
 from ..utils.config import main_settings
 from ..utils.string_processing import fit_to_file_system
@@ -204,9 +205,12 @@ class Cache:
         for path in self._dir.iterdir():
             if path.is_dir():
                 for file in path.iterdir():
+                    output(f"Deleting file {file}", color=BColors.GREY)
                     file.unlink()
+
+                output(f"Deleting folder {path}", color=BColors.HEADER)
                 path.rmdir()
             else:
+                output(f"Deleting folder {path}", color=BColors.HEADER)
                 path.unlink()
 
         self.cached_attributes.clear()

View File

@@ -1,12 +1,12 @@
 from __future__ import annotations
 
+import copy
+import inspect
 import logging
 import threading
 import time
-from typing import List, Dict, Optional, Set
-from urllib.parse import urlparse, urlunsplit, ParseResult
-import copy
-import inspect
+from typing import TYPE_CHECKING, Dict, List, Optional, Set
+from urllib.parse import ParseResult, urlparse, urlunsplit
 
 import requests
 import responses
@@ -14,12 +14,15 @@ from tqdm import tqdm
 
 from .cache import Cache
 from .rotating import RotatingProxy
-from ..objects import Target
+
+if TYPE_CHECKING:
+    from ..objects import Target
+
 from ..utils import request_trace
-from ..utils.string_processing import shorten_display_url
 from ..utils.config import main_settings
-from ..utils.support_classes.download_result import DownloadResult
 from ..utils.hacking import merge_args
+from ..utils.string_processing import shorten_display_url
+from ..utils.support_classes.download_result import DownloadResult
 
 class Connection:

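The hunk above moves the Target import behind TYPE_CHECKING, which matches the "fix: circular input" commit: the type stays visible to checkers while the runtime import cycle between connection and objects is broken. A sketch of the pattern, assuming the package is importable as music_kraken (the function name is illustrative):

```
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # evaluated by type checkers only; never executed at runtime,
    # so connection no longer imports objects (which imports connection)
    from music_kraken.objects import Target


def stream_into(target: "Target") -> None:
    ...
```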
View File

@@ -3,6 +3,9 @@ from collections import defaultdict
 from pathlib import Path
 import re
 import logging
+import subprocess
+
+from PIL import Image
 
 from . import FetchOptions, DownloadOptions
 from .results import SearchResults
@@ -17,6 +20,7 @@ from ..objects import (
     Artist,
     Label,
 )
+from ..objects.artwork import ArtworkVariant
 from ..audio import write_metadata_to_target, correct_codec
 from ..utils import output, BColors
 from ..utils.string_processing import fit_to_file_system
@@ -29,10 +33,10 @@ from ..utils.support_classes.download_result import DownloadResult
 from ..utils.exception import MKMissingNameException
 from ..utils.exception.download import UrlNotFoundException
 from ..utils.shared import DEBUG_PAGES
-from ..connection import Connection
 
 from ..pages import Page, EncyclopaediaMetallum, Musify, YouTube, YoutubeMusic, Bandcamp, Genius, INDEPENDENT_DB_OBJECTS
 
 ALL_PAGES: Set[Type[Page]] = {
     # EncyclopaediaMetallum,
     Genius,
@@ -87,7 +91,8 @@ class Pages:
             exclude_pages = exclude_pages.union(SHADY_PAGES)
 
         if not exclude_pages.issubset(ALL_PAGES):
-            raise ValueError(f"The excluded pages have to be a subset of all pages: {exclude_pages} | {ALL_PAGES}")
+            raise ValueError(
+                f"The excluded pages have to be a subset of all pages: {exclude_pages} | {ALL_PAGES}")
 
         def _set_to_tuple(page_set: Set[Type[Page]]) -> Tuple[Type[Page], ...]:
             return tuple(sorted(page_set, key=lambda page: page.__name__))
@@ -95,11 +100,14 @@ class Pages:
         self._pages_set: Set[Type[Page]] = ALL_PAGES.difference(exclude_pages)
         self.pages: Tuple[Type[Page], ...] = _set_to_tuple(self._pages_set)
 
-        self._audio_pages_set: Set[Type[Page]] = self._pages_set.intersection(AUDIO_PAGES)
-        self.audio_pages: Tuple[Type[Page], ...] = _set_to_tuple(self._audio_pages_set)
+        self._audio_pages_set: Set[Type[Page]
+                                   ] = self._pages_set.intersection(AUDIO_PAGES)
+        self.audio_pages: Tuple[Type[Page], ...] = _set_to_tuple(
+            self._audio_pages_set)
 
         for page_type in self.pages:
-            self._page_instances[page_type] = page_type(fetch_options=self.fetch_options, download_options=self.download_options)
+            self._page_instances[page_type] = page_type(
+                fetch_options=self.fetch_options, download_options=self.download_options)
             self._source_to_page[page_type.SOURCE_TYPE] = page_type
 
     def _get_page_from_enum(self, source_page: SourceType) -> Page:
@@ -113,7 +121,8 @@ class Pages:
         for page_type in self.pages:
             result.add(
                 page=page_type,
-                search_result=self._page_instances[page_type].search(query=query)
+                search_result=self._page_instances[page_type].search(
+                    query=query)
             )
 
         return result
@@ -126,7 +135,8 @@ class Pages:
         for source in data_object.source_collection.get_sources(source_type_sorting={
             "only_with_page": True,
         }):
-            new_data_object = self.fetch_from_source(source=source, stop_at_level=stop_at_level)
+            new_data_object = self.fetch_from_source(
+                source=source, stop_at_level=stop_at_level)
             if new_data_object is not None:
                 data_object.merge(new_data_object)
@@ -162,10 +172,38 @@ class Pages:
 
         return False
 
+    def _fetch_artist_artwork(self, artist: Artist, naming: dict):
+        naming: Dict[str, List[str]] = defaultdict(list, naming)
+        naming["artist"].append(artist.name)
+        naming["label"].extend(
+            [l.title_value for l in artist.label_collection])
+        # removing duplicates from the naming, and process the strings
+        for key, value in naming.items():
+            # https://stackoverflow.com/a/17016257
+            naming[key] = list(dict.fromkeys(value))
+
+        artwork_collection: ArtworkCollection = artist.artwork
+        artwork_collection.compile()
+
+        for image_number, artwork in enumerate(artwork_collection):
+            for artwork_variant in artwork.variants:
+                naming["image_number"] = [str(image_number)]
+                target = Target(
+                    relative_to_music_dir=True,
+                    file_path=Path(self._parse_path_template(
+                        main_settings["artist_artwork_path"], naming=naming))
+                )
+                if not target.file_path.parent.exists():
+                    target.create_path()
+                    subprocess.Popen(["gio", "set", target.file_path.parent, "metadata::custom-icon", "file://"+str(target.file_path)])
+                with Image.open(artwork_variant.target.file_path) as img:
+                    img.save(target.file_path, main_settings["image_format"])
+                    artwork_variant.target = Target
+
     def download(self, data_object: DataObject, genre: str, **kwargs) -> DownloadResult:
         # fetch the given object
         self.fetch_details(data_object)
-        output(f"\nDownloading {data_object.option_string}...", color=BColors.BOLD)
+        output(
+            f"\nDownloading {data_object.option_string}...", color=BColors.BOLD)
 
         # fetching all parent objects (e.g. if you only download a song)
         if not kwargs.get("fetched_upwards", False):
@@ -186,6 +224,16 @@ class Pages:
             kwargs["fetched_upwards"] = True
 
+        naming = kwargs.get("naming", {
+            "genre": [genre],
+            "audio_format": [main_settings["audio_format"]],
+            "image_format": [main_settings["image_format"]]
+        })
+
+        # download artist artwork
+        if isinstance(data_object, Artist):
+            self._fetch_artist_artwork(artist=data_object, naming=naming)
+
         # download all children
         download_result: DownloadResult = DownloadResult()
         for c in data_object.get_child_collections():
@@ -203,10 +251,7 @@ class Pages:
             I am able to do that, because duplicate values are removed later on.
             """
 
-            self._download_song(data_object, naming={
-                "genre": [genre],
-                "audio_format": [main_settings["audio_format"]],
-            })
+            self._download_song(data_object, naming=naming)
 
         return download_result
@@ -214,13 +259,15 @@ class Pages:
         return set(re.findall(r"{([^}]+)}", path_template))
 
     def _parse_path_template(self, path_template: str, naming: Dict[str, List[str]]) -> str:
-        field_names: Set[str] = self._extract_fields_from_template(path_template)
+        field_names: Set[str] = self._extract_fields_from_template(
+            path_template)
 
         for field in field_names:
             if len(naming[field]) == 0:
                 raise MKMissingNameException(f"Missing field for {field}.")
 
-            path_template = path_template.replace(f"{{{field}}}", naming[field][0])
+            path_template = path_template.replace(
+                f"{{{field}}}", naming[field][0])
 
         return path_template
@@ -239,7 +286,8 @@ class Pages:
         naming["song"].append(song.title_value)
         naming["isrc"].append(song.isrc)
         naming["album"].extend(a.title_value for a in song.album_collection)
-        naming["album_type"].extend(a.album_type.value for a in song.album_collection)
+        naming["album_type"].extend(
+            a.album_type.value for a in song.album_collection)
         naming["artist"].extend(a.name for a in song.artist_collection)
         naming["artist"].extend(a.name for a in song.feature_artist_collection)
         for a in song.album_collection:
@@ -256,13 +304,16 @@ class Pages:
             song.target_collection.append(Target(
                 relative_to_music_dir=True,
                 file_path=Path(
-                    self._parse_path_template(main_settings["download_path"], naming=naming),
-                    self._parse_path_template(main_settings["download_file"], naming=naming),
+                    self._parse_path_template(
+                        main_settings["download_path"], naming=naming),
+                    self._parse_path_template(
+                        main_settings["download_file"], naming=naming),
                 )
             ))
 
         for target in song.target_collection:
             if target.exists:
-                output(f'{target.file_path} {BColors.OKGREEN.value}[already exists]', color=BColors.GREY)
+                output(
+                    f'{target.file_path} {BColors.OKGREEN.value}[already exists]', color=BColors.GREY)
                 r.found_on_disk += 1
 
                 if not self.download_options.download_again_if_found:
@@ -283,8 +334,10 @@ class Pages:
                     break
 
                 used_source = source
-                streaming_results = source.page.download_song_to_target(source=source, target=tmp, desc="download")
-                skip_intervals = source.page.get_skip_intervals(song=song, source=source)
+                streaming_results = source.page.download_song_to_target(
+                    source=source, target=tmp, desc="download")
+                skip_intervals = source.page.get_skip_intervals(
+                    song=song, source=source)
 
                 # if something has been downloaded but it somehow failed, delete the file
                 if streaming_results.is_fatal_error and tmp.exists:
@@ -308,7 +361,8 @@ class Pages:
             used_source.page.post_process_hook(song=song, temp_target=tmp)
 
         if not found_on_disk or self.download_options.process_metadata_if_found:
-            write_metadata_to_target(metadata=song.metadata, target=tmp, song=song)
+            write_metadata_to_target(
+                metadata=song.metadata, target=tmp, song=song)
 
         # copy the tmp target to the final locations
         for target in song.target_collection:

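The new _fetch_artist_artwork and the reused naming dict both lean on the same two tricks visible above: order-preserving de-duplication via dict.fromkeys and simple {field} substitution in a path template. A toy re-implementation for illustration only (the template string and values are made up, not project settings):

```
import re
from collections import defaultdict
from typing import Dict, List

def fill_template(path_template: str, naming: Dict[str, List[str]]) -> str:
    # order-preserving de-duplication, same dict.fromkeys trick as in the diff
    naming = {key: list(dict.fromkeys(values)) for key, values in naming.items()}

    for field in set(re.findall(r"{([^}]+)}", path_template)):
        if not naming.get(field):
            raise ValueError(f"Missing field for {field}.")
        path_template = path_template.replace(f"{{{field}}}", naming[field][0])

    return path_template

naming = defaultdict(list, {"artist": ["Ghost Bath", "Ghost Bath"], "image_number": ["0"]})
print(fill_template("{artist}/artwork_{image_number}.jpg", naming))  # Ghost Bath/artwork_0.jpg
```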
View File

@@ -1,27 +1,16 @@
 from typing_extensions import TypeVar
 
+from .artwork import ArtworkCollection
+from .collection import Collection
+from .contact import Contact
+from .country import Country
+from .formatted_text import FormattedText
+from .metadata import ID3Timestamp
+from .metadata import Mapping as ID3Mapping
+from .metadata import Metadata
 from .option import Options
-
-from .metadata import Metadata, Mapping as ID3Mapping, ID3Timestamp
+from .parents import OuterProxy
+from .song import Album, Artist, Label, Lyrics, Song, Target
 from .source import Source, SourceType
-from .song import (
-    Song,
-    Album,
-    Artist,
-    Target,
-    Lyrics,
-    Label
-)
-from .formatted_text import FormattedText
-from .collection import Collection
-from .country import Country
-from .contact import Contact
-from .parents import OuterProxy
-from .artwork import Artwork
 
 DatabaseObject = OuterProxy

View File

@@ -1,64 +1,243 @@
 from __future__ import annotations
 
-from typing import List, Optional, Dict, Tuple, Type, Union, TypedDict
-
-from .collection import Collection
-from .metadata import (
-    Mapping as id3Mapping,
-    ID3Timestamp,
-    Metadata
-)
-from ..utils.string_processing import unify, hash_url
-from .parents import OuterProxy as Base
-
-from ..utils.config import main_settings
-
-
-class ArtworkVariant(TypedDict):
-    url: str
-    width: int
-    height: int
-    deviation: float
-
-
-class Artwork:
-    def __init__(self, *variants: List[ArtworkVariant]) -> None:
-        self._variant_mapping: Dict[str, ArtworkVariant] = {}
-
-        for variant in variants:
-            self.append(**variant)
-
-    @staticmethod
-    def _calculate_deviation(*dimensions: List[int]) -> float:
-        return sum(abs(d - main_settings["preferred_artwork_resolution"]) for d in dimensions) / len(dimensions)
-
-    def append(self, url: str, width: int = main_settings["preferred_artwork_resolution"], height: int = main_settings["preferred_artwork_resolution"], **kwargs) -> None:
-        if url is None:
-            return
-
-        self._variant_mapping[hash_url(url=url)] = {
-            "url": url,
-            "width": width,
-            "height": height,
-            "deviation": self._calculate_deviation(width, height),
-        }
-
-    @property
-    def best_variant(self) -> ArtworkVariant:
-        if len(self._variant_mapping.keys()) <= 0:
-            return None
-        return min(self._variant_mapping.values(), key=lambda x: x["deviation"])
-
-    def get_variant_name(self, variant: ArtworkVariant) -> str:
-        return f"artwork_{variant['width']}x{variant['height']}_{hash_url(variant['url']).replace('/', '_')}"
-
-    def __merge__(self, other: Artwork, **kwargs) -> None:
-        for key, value in other._variant_mapping.items():
-            if key not in self._variant_mapping:
-                self._variant_mapping[key] = value
-
-    def __eq__(self, other: Artwork) -> bool:
-        if not isinstance(other, Artwork):
-            return False
-        return any(a == b for a, b in zip(self._variant_mapping.keys(), other._variant_mapping.keys()))
+from copy import copy
+from dataclasses import dataclass, field
+from functools import cached_property
+from typing import Dict, List, Optional, Set, Tuple, Type, TypedDict, Union
+
+from ..connection import Connection
+from ..utils import create_dataclass_instance, custom_hash
+from ..utils.config import main_settings
+from ..utils.enums import PictureType
+from ..utils.string_processing import hash_url, unify
+from .collection import Collection
+from .metadata import ID3Timestamp
+from .metadata import Mapping as id3Mapping
+from .metadata import Metadata
+from .parents import OuterProxy as Base
+from .target import Target
+
+from PIL import Image
+import imagehash
+
+artwork_connection: Connection = Connection(module="artwork")
+
+
+@dataclass
+class ArtworkVariant:
+    url: str
+    width: Optional[int] = None
+    heigth: Optional[int] = None
+    image_format: Optional[str] = None
+
+    def __hash__(self) -> int:
+        return custom_hash(self.url)
+
+    def __eq__(self, other: ArtworkVariant) -> bool:
+        return hash(self) == hash(other)
+
+    def __contains__(self, other: str) -> bool:
+        return custom_hash(other) == hash(self.url)
+
+    def __merge__(self, other: ArtworkVariant) -> None:
+        for key, value in other.__dict__.items():
+            if value is None:
+                continue
+
+            if getattr(self, key) is None:
+                setattr(self, key, value)
+
+    @cached_property
+    def target(self) -> Target:
+        return Target.temp()
+
+    def fetch(self) -> None:
+        global artwork_connection
+
+        r = artwork_connection.get(self.url, name=hash_url(self.url))
+        if r is None:
+            return
+
+        self.target.raw_content = r.content
+
+
+@dataclass
+class Artwork:
+    variants: List[ArtworkVariant] = field(default_factory=list)
+
+    artwork_type: PictureType = PictureType.OTHER
+
+    def search_variant(self, url: str) -> Optional[ArtworkVariant]:
+        if url is None:
+            return None
+
+        for variant in self.variants:
+            if url in variant:
+                return variant
+
+        return None
+
+    def __contains__(self, other: str) -> bool:
+        return self.search_variant(other) is not None
+
+    def add_data(self, **kwargs) -> None:
+        variant = self.search_variant(kwargs.get("url"))
+
+        if variant is None:
+            variant, kwargs = create_dataclass_instance(ArtworkVariant, kwargs)
+            self.variants.append(variant)
+
+        variant.__dict__.update(kwargs)
+
+    @property
+    def url(self) -> Optional[str]:
+        if len(self.variants) <= 0:
+            return None
+        return self.variants[0].url
+
+    def fetch(self) -> None:
+        for variant in self.variants:
+            variant.fetch()
+
+
+class ArtworkCollection:
+    """
+    Stores all the images/artworks for one data object.
+
+    There could be duplicates before calling ArtworkCollection.compile()
+    _this is called before one object is downloaded automatically._
+    """
+
+    artwork_type: PictureType = PictureType.OTHER
+
+    def __init__(
+        self,
+        *data: List[Artwork],
+        parent_artworks: Set[ArtworkCollection] = None,
+        crop_images: bool = True,
+    ) -> None:
+        # this is used for the song artwork, to fall back to the song artwork
+        self.parent_artworks: Set[ArtworkCollection] = parent_artworks or set()
+        self.crop_images: bool = crop_images
+
+        self._data = []
+        self.extend(data)
+
+    def search_artwork(self, url: str) -> Optional[ArtworkVariant]:
+        for artwork in self._data:
+            if url in artwork:
+                return artwork
+
+        return None
+
+    def __contains__(self, other: str) -> bool:
+        return self.search_artwork(other) is not None
+
+    def _create_new_artwork(self, **kwargs) -> Tuple[Artwork, dict]:
+        kwargs["artwork_type"] = kwargs.get("artwork_type", self.artwork_type)
+
+        return create_dataclass_instance(Artwork, dict(**kwargs))
+
+    def add_data(self, url: str, **kwargs) -> Artwork:
+        kwargs["url"] = url
+
+        artwork = self.search_artwork(url)
+
+        if artwork is None:
+            artwork, kwargs = self._create_new_artwork(**kwargs)
+            self._data.append(artwork)
+
+        artwork.add_data(**kwargs)
+
+        return artwork
+
+    def append(self, value: Union[Artwork, ArtworkVariant, dict], **kwargs):
+        """
+        You can append the types Artwork, ArtworkVariant or dict
+        the best option would be to use Artwork and avoid the other options.
+        """
+        if isinstance(value, dict):
+            kwargs.update(value)
+            value, kwargs = create_dataclass_instance(ArtworkVariant, kwargs)
+
+        if isinstance(value, ArtworkVariant):
+            kwargs["variants"] = [value]
+            value, kwargs = create_dataclass_instance(Artwork, kwargs)
+
+        if isinstance(value, Artwork):
+            self._data.append(value)
+            return
+
+    def extend(self, values: List[Union[Artwork, ArtworkVariant, dict]], **kwargs):
+        for value in values:
+            self.append(value, **kwargs)
+
+    def compile(self, **kwargs) -> None:
+        """
+        This will make the artworks ready for download and delete duplicates.
+        """
+        artwork_hashes: list = list()
+        artwork_urls: list = list()
+        for artwork in self._data:
+            index = 0
+            for artwork_variant in artwork.variants:
+                r = artwork_connection.get(
+                    url=artwork_variant.url,
+                    name=artwork_variant.url,
+                )
+
+                if artwork_variant.url in artwork_urls:
+                    artwork.variants.pop(index)
+                    continue
+                artwork_urls.append(artwork_variant.url)
+
+                target: Target = artwork_variant.target
+
+                with target.open("wb") as f:
+                    f.write(r.content)
+
+                with Image.open(target.file_path) as img:
+                    # https://stackoverflow.com/a/59476938/16804841
+                    if img.mode != 'RGB':
+                        img = img.convert('RGB')
+
+                    try:
+                        image_hash = imagehash.crop_resistant_hash(img)
+                    except Exception as e:
+                        continue
+
+                    if image_hash in artwork_hashes:
+                        artwork.variants.pop(index)
+                        target.delete()
+                        continue
+                    artwork_hashes.append(image_hash)
+                    width, height = img.size
+                    if width != height:
+                        if width > height:
+                            img = img.crop((width // 2 - height // 2, 0, width // 2 + height // 2, height))
+                        else:
+                            img = img.crop((0, height // 2 - width // 2, width, height // 2 + width // 2))
+
+                    # resize the image to the preferred resolution
+                    img.thumbnail((main_settings["preferred_artwork_resolution"], main_settings["preferred_artwork_resolution"]))
+
+                index =+ 1
+
+    def __merge__(self, other: ArtworkCollection, **kwargs) -> None:
+        self.parent_artworks.update(other.parent_artworks)
+        for other_artwork in other._data:
+            for other_variant in other_artwork.variants:
+                if self.__contains__(other_variant.url):
+                    continue
+                self.append(ArtworkVariant(other_variant.url))
+
+    def __hash__(self) -> int:
+        return id(self)
+
+    def __iter__(self) -> Generator[Artwork, None, None]:
+        yield from self._data
+
+    def get_urls(self) -> Generator[str, None, None]:
+        yield from (artwork.url for artwork in self._data if artwork.url is not None)

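Pulled together, the rewritten module gives every data object one ArtworkCollection that pages fill with add_data and the downloader finalizes with compile. A hedged usage sketch, assuming ArtworkCollection is re-exported from music_kraken.objects as the package __init__ hunk earlier in this compare suggests (URLs are placeholders, and compile needs network access):

```
from music_kraken.objects import ArtworkCollection

artwork = ArtworkCollection()
artwork.add_data(url="https://example.com/cover_350x350.jpg", width=350, height=350)
artwork.add_data(url="https://example.com/cover_1200x1200.jpg", width=1200, height=1200)

# downloads each variant, drops perceptual duplicates via imagehash,
# crops to a square and resizes to the configured resolution
artwork.compile()

for image in artwork:
    for variant in image.variants:
        print(variant.url, variant.target.file_path)
```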
View File

@@ -1,16 +1,43 @@
 from __future__ import annotations
 
-from collections import defaultdict
-from typing import TypeVar, Generic, Dict, Optional, Iterable, List, Iterator, Tuple, Generator, Union, Any, Set
 import copy
+from collections import defaultdict
+from dataclasses import dataclass
+from typing import (Any, Callable, Dict, Generator, Generic, Iterable,
+                    Iterator, List, Optional, Set, Tuple, TypeVar, Union)
 
-from .parents import OuterProxy
-from ..utils import object_trace
-from ..utils import output, BColors
+from ..utils import BColors, object_trace, output
+from .parents import InnerData, OuterProxy
 
 T = TypeVar('T', bound=OuterProxy)
 
+
+@dataclass
+class AppendHookArguments:
+    """
+    This class is used to store the arguments for the append hook.
+    The best explanation is with an examples:
+
+    ```
+    album = Album()
+    song = Song()
+    album.song_collection.append(song)
+    ```
+
+    In this case, the append hook is triggered with the following arguments:
+    ```
+    AppendHookArguments(
+        collection=album.song_collection,
+        new_object=song,
+        collection_root_objects=[album]
+    )
+    ```
+    """
+    collection: Collection
+    new_object: T
+    collection_root_objects: Set[InnerData]
+
 class Collection(Generic[T]):
     __is_collection__ = True
@@ -27,6 +54,7 @@ class Collection(Generic[T]):
             sync_on_append: Dict[str, Collection] = None,
             append_object_to_attribute: Dict[str, T] = None,
             extend_object_to_attribute: Dict[str, Collection] = None,
+            append_callbacks: Set[Callable[[AppendHookArguments], None]] = None,
     ) -> None:
         self._collection_for: dict = dict()
@@ -41,6 +69,7 @@ class Collection(Generic[T]):
         self.sync_on_append: Dict[str, Collection] = sync_on_append or {}
         self.pull_from: List[Collection] = []
         self.push_to: List[Collection] = []
+        self.append_callbacks: Set[Callable[[AppendHookArguments], None]] = append_callbacks or set()
 
         # This is to cleanly unmap previously mapped items by their id
         self._indexed_from_id: Dict[int, Dict[str, Any]] = defaultdict(dict)
@@ -141,6 +170,14 @@ class Collection(Generic[T]):
         for attribute, new_object in self.append_object_to_attribute.items():
             other.__getattribute__(attribute).append(new_object, **kwargs)
 
+        append_hook_args = AppendHookArguments(
+            collection=self,
+            new_object=other,
+            collection_root_objects=self._collection_for.keys(),
+        )
+        for callback in self.append_callbacks:
+            callback(append_hook_args)
+
     def append(self, other: Optional[T], **kwargs):
         """
         If an object, that represents the same entity exists in a relevant collection,

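The append hook is what ties collections to artwork inheritance: Song and Album (in the next file) register register_artwork_parent as callbacks, and further callbacks can be attached the same way. A sketch of the intended behavior, assuming Song and Album can be constructed from just a title (the titles are arbitrary examples):

```
from music_kraken.objects import Album, Song
from music_kraken.objects.collection import AppendHookArguments

def log_append(args: AppendHookArguments) -> None:
    # runs once the appended object has actually landed in the collection
    print("appended", args.new_object)

song = Song(title="Seraphic")
song.album_collection.append_callbacks.add(log_append)

# the built-in Song.register_artwork_parent callback also fires here and
# links the album's ArtworkCollection into song.artwork.parent_artworks
song.album_collection.append(Album(title="Starmourner"))
```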
View File

@@ -1,35 +1,32 @@
 from __future__ import annotations
 
+import copy
 import random
 from collections import defaultdict
-from typing import List, Optional, Dict, Tuple, Type, Union
-import copy
+from typing import Dict, List, Optional, Tuple, Type, Union
 
 import pycountry
 
-from ..utils.enums.album import AlbumType, AlbumStatus
-from .collection import Collection
-from .formatted_text import FormattedText
-from .lyrics import Lyrics
-from .contact import Contact
-from .artwork import Artwork
-from .metadata import (
-    Mapping as id3Mapping,
-    ID3Timestamp,
-    Metadata
-)
-from .option import Options
-from .parents import OuterProxy, P
-from .source import Source, SourceCollection
-from .target import Target
-from .country import Language, Country
+from ..utils.config import main_settings
+from ..utils.enums.album import AlbumStatus, AlbumType
+from ..utils.enums.colors import BColors
 from ..utils.shared import DEBUG_PRINT_ID
 from ..utils.string_processing import unify
-
+from .artwork import ArtworkCollection
+from .collection import AppendHookArguments, Collection
+from .contact import Contact
+from .country import Country, Language
+from .formatted_text import FormattedText
+from .lyrics import Lyrics
+from .metadata import ID3Timestamp
+from .metadata import Mapping as id3Mapping
+from .metadata import Metadata
+from .option import Options
+from .parents import OuterProxy
 from .parents import OuterProxy as Base
-
-from ..utils.config import main_settings
-from ..utils.enums.colors import BColors
+from .parents import P
+from .source import Source, SourceCollection
+from .target import Target
 
 """
 All Objects dependent
@@ -89,7 +86,7 @@ class Song(Base):
     genre: str
     note: FormattedText
     tracksort: int
-    artwork: Artwork
+    artwork: ArtworkCollection
 
     source_collection: SourceCollection
     target_collection: Collection[Target]
@@ -105,7 +102,7 @@ class Song(Base):
         "source_collection": SourceCollection,
         "target_collection": Collection,
         "lyrics_collection": Collection,
-        "artwork": Artwork,
+        "artwork": ArtworkCollection,
 
         "album_collection": Collection,
         "artist_collection": Collection,
@@ -133,7 +130,7 @@ class Song(Base):
         feature_artist_list: List[Artist] = None,
         album_list: List[Album] = None,
         tracksort: int = 0,
-        artwork: Optional[Artwork] = None,
+        artwork: Optional[ArtworkCollection] = None,
         **kwargs
     ) -> None:
         real_kwargs = copy.copy(locals())
@@ -144,6 +141,14 @@ class Song(Base):
     UPWARDS_COLLECTION_STRING_ATTRIBUTES = ("artist_collection", "feature_artist_collection", "album_collection")
     TITEL = "title"
 
+    @staticmethod
+    def register_artwork_parent(append_hook_arguments: AppendHookArguments):
+        album: Album = append_hook_arguments.new_object
+
+        song: Song
+        for song in append_hook_arguments.collection_root_objects:
+            song.artwork.parent_artworks.add(album.artwork)
+
     def __init_collections__(self) -> None:
         self.feature_artist_collection.push_to = [self.artist_collection]
         self.artist_collection.pull_from = [self.feature_artist_collection]
@@ -161,6 +166,7 @@ class Song(Base):
         self.feature_artist_collection.extend_object_to_attribute = {
             "album_collection": self.album_collection
         }
+        self.album_collection.append_callbacks = set((Song.register_artwork_parent, ))
 
     def _add_other_db_objects(self, object_type: Type[OuterProxy], object_list: List[OuterProxy]):
         if object_type is Song:
@@ -178,6 +184,10 @@ class Song(Base):
             self.album_collection.extend(object_list)
             return
 
+    def _compile(self):
+        self.artwork.compile()
+
     INDEX_DEPENDS_ON = ("title", "isrc", "source_collection")
 
     @property
@@ -249,6 +259,7 @@ class Album(Base):
     albumsort: int
     notes: FormattedText
 
+    artwork: ArtworkCollection
     source_collection: SourceCollection
 
     song_collection: Collection[Song]
@@ -268,6 +279,7 @@ class Album(Base):
         "date": ID3Timestamp,
         "notes": FormattedText,
 
+        "artwork": lambda: ArtworkCollection(crop_images=False),
         "source_collection": SourceCollection,
 
         "song_collection": Collection,
@@ -290,6 +302,7 @@ class Album(Base):
             barcode: str = None,
             albumsort: int = None,
             notes: FormattedText = None,
+            artwork: ArtworkCollection = None,
             source_list: List[Source] = None,
             artist_list: List[Artist] = None,
             song_list: List[Song] = None,
@@ -304,6 +317,13 @@ class Album(Base):
     DOWNWARDS_COLLECTION_STRING_ATTRIBUTES = ("song_collection",)
     UPWARDS_COLLECTION_STRING_ATTRIBUTES = ("label_collection", "artist_collection")
 
+    @staticmethod
+    def register_artwork_parent(append_hook_arguments: AppendHookArguments):
+        song: Song = append_hook_arguments.new_object
+
+        for root_object in append_hook_arguments.collection_root_objects:
+            song.artwork.parent_artworks.add(root_object.artwork)
+
     def __init_collections__(self):
         self.feature_artist_collection.push_to = [self.artist_collection]
         self.artist_collection.pull_from = [self.feature_artist_collection]
@@ -322,6 +342,8 @@ class Album(Base):
             "label_collection": self.label_collection
         }
 
+        self.song_collection.append_callbacks = set((Album.register_artwork_parent, ))
+
     def _add_other_db_objects(self, object_type: Type[OuterProxy], object_list: List[OuterProxy]):
         if object_type is Song:
             self.song_collection.extend(object_list)
@@ -477,6 +499,8 @@ class Artist(Base):
     general_genre: str
     unformatted_location: str
 
+    artwork: ArtworkCollection
+
     source_collection: SourceCollection
     contact_collection: Collection[Contact]
@@ -493,6 +517,8 @@ class Artist(Base):
         "lyrical_themes": list,
         "general_genre": lambda: "",
 
+        "artwork": ArtworkCollection,
+
         "source_collection": SourceCollection,
         "album_collection": Collection,
         "contact_collection": Collection,
@@ -511,6 +537,7 @@ class Artist(Base):
             notes: FormattedText = None,
             lyrical_themes: List[str] = None,
             general_genre: str = None,
+            artwork: ArtworkCollection = None,
             unformatted_location: str = None,
             source_list: List[Source] = None,
             contact_list: List[Contact] = None,

View File

@@ -1,17 +1,17 @@
 from __future__ import annotations
 
-from pathlib import Path
-from typing import List, Tuple, TextIO, Union, Optional
 import logging
 import random
+from pathlib import Path
+from typing import List, Optional, TextIO, Tuple, Union
 
 import requests
 from tqdm import tqdm
 
-from .parents import OuterProxy
+from ..utils.config import logging_settings, main_settings
 from ..utils.shared import HIGHEST_ID
-from ..utils.config import main_settings, logging_settings
 from ..utils.string_processing import fit_to_file_system
+from .parents import OuterProxy
 
 LOGGER = logging.getLogger("target")
@@ -31,7 +31,8 @@ class Target(OuterProxy):
     }
 
     @classmethod
-    def temp(cls, name: str = str(random.randint(0, HIGHEST_ID)), file_extension: Optional[str] = None) -> P:
+    def temp(cls, name: str = None, file_extension: Optional[str] = None) -> P:
+        name = name or str(random.randint(0, HIGHEST_ID))
         if file_extension is not None:
             name = f"{name}.{file_extension}"
@@ -117,3 +118,11 @@ class Target(OuterProxy):
     def read_bytes(self) -> bytes:
         return self.file_path.read_bytes()
+
+    @property
+    def raw_content(self) -> bytes:
+        return self.file_path.read_bytes()
+
+    @raw_content.setter
+    def raw_content(self, content: bytes):
+        self.file_path.write_bytes(content)

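raw_content makes a Target usable as a byte buffer from both sides: pages can write a downloaded response into it and the tagger can read it back without touching pathlib directly. A small sketch, assuming the temporary directory behind Target.temp() already exists (the URL is a placeholder):

```
import requests

from music_kraken.objects import Target

tmp: Target = Target.temp(file_extension="jpg")

r = requests.get("https://example.com/cover.jpg")
tmp.raw_content = r.content           # write_bytes under the hood
print(len(tmp.raw_content), "bytes")  # read_bytes under the hood
```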
View File

@@ -1,33 +1,22 @@
-from typing import List, Optional, Type
-from urllib.parse import urlparse, urlunparse
 import json
 from enum import Enum
-from bs4 import BeautifulSoup
-import pycountry
+from typing import List, Optional, Type
+from urllib.parse import urlparse, urlunparse
 
-from ..objects import Source, DatabaseObject
-from .abstract import Page
-from ..objects import (
-    Artist,
-    Source,
-    SourceType,
-    Song,
-    Album,
-    Label,
-    Target,
-    Contact,
-    ID3Timestamp,
-    Lyrics,
-    FormattedText,
-    Artwork,
-)
+import pycountry
+from bs4 import BeautifulSoup
+
 from ..connection import Connection
+from ..objects import (Album, Artist, ArtworkCollection, Contact,
+                       DatabaseObject, FormattedText, ID3Timestamp, Label,
+                       Lyrics, Song, Source, SourceType, Target)
 from ..utils import dump_to_file
-from ..utils.enums import SourceType, ALL_SOURCE_TYPES
-from ..utils.support_classes.download_result import DownloadResult
-from ..utils.string_processing import clean_song_title
-from ..utils.config import main_settings, logging_settings
+from ..utils.config import logging_settings, main_settings
+from ..utils.enums import ALL_SOURCE_TYPES, SourceType
 from ..utils.shared import DEBUG
+from ..utils.string_processing import clean_song_title
+from ..utils.support_classes.download_result import DownloadResult
+from .abstract import Page
 
 if DEBUG:
     from ..utils import dump_to_file
@@ -239,6 +228,11 @@ class Bandcamp(Page):
             for subsoup in html_music_grid.find_all("li"):
                 artist.album_collection.append(self._parse_album(soup=subsoup, initial_source=source))
 
+        # artist artwork
+        artist_artwork: BeautifulSoup = soup.find("img", {"class":"band-photo"})
+        if artist_artwork is not None:
+            artist.artwork.add_data(artist_artwork.get("data-src", artist_artwork.get("src")))
+
         for i, data_blob_soup in enumerate(soup.find_all("div", {"id": ["pagedata", "collectors-data"]})):
             data_blob = data_blob_soup["data-blob"]
@@ -253,7 +247,7 @@ class Bandcamp(Page):
         artist.source_collection.append(source)
         return artist
 
-    def _parse_track_element(self, track: dict, artwork: Artwork) -> Optional[Song]:
+    def _parse_track_element(self, track: dict, artwork: ArtworkCollection) -> Optional[Song]:
         lyrics_list: List[Lyrics] = []
 
         _lyrics: Optional[str] = track.get("item", {}).get("recordingOf", {}).get("lyrics", {}).get("text")
@@ -287,9 +281,15 @@ class Bandcamp(Page):
         artist_source_list = []
         if "@id" in artist_data:
             artist_source_list = [Source(self.SOURCE_TYPE, _parse_artist_url(artist_data["@id"]))]
+
+        source_list: List[Source] = [source]
+
+        if "mainEntityOfPage" in data or "@id" in data:
+            source_list.append(Source(self.SOURCE_TYPE, data.get("mainEntityOfPage", data["@id"])))
+
         album = Album(
             title=data["name"].strip(),
-            source_list=[Source(self.SOURCE_TYPE, data.get("mainEntityOfPage", data["@id"]))],
+            source_list=source_list,
             date=ID3Timestamp.strptime(data["datePublished"], "%d %b %Y %H:%M:%S %Z"),
             artist_list=[Artist(
                 name=artist_data["name"].strip(),
@@ -297,7 +297,7 @@ class Bandcamp(Page):
             )]
         )
 
-        artwork: Artwork = Artwork()
+        artwork: ArtworkCollection = ArtworkCollection()
 
         def _get_artwork_url(_data: dict) -> Optional[str]:
             if "image" in _data:
@@ -308,15 +308,14 @@ class Bandcamp(Page):
         _artwork_url = _get_artwork_url(data)
         if _artwork_url is not None:
-            artwork.append(url=_artwork_url, width=350, height=350)
+            artwork.add_data(url=_artwork_url, width=350, height=350)
         else:
             for album_release in data.get("albumRelease", []):
                 _artwork_url = _get_artwork_url(album_release)
                 if _artwork_url is not None:
-                    artwork.append(url=_artwork_url, width=350, height=350)
+                    artwork.add_data(url=_artwork_url, width=350, height=350)
                     break
 
         for i, track_json in enumerate(data.get("track", {}).get("itemListElement", [])):
             if DEBUG:
                 dump_to_file(f"album_track_{i}.json", json.dumps(track_json), is_json=True, exit_after_dump=False)
@@ -362,17 +361,29 @@ class Bandcamp(Page):
         for key, value in other_data.get("trackinfo", [{}])[0].get("file", {"": None}).items():
             mp3_url = value
 
+        source_list: List[Source] = [source]
+        if "mainEntityOfPage" in data or "@id" in data:
+            source_list.append(Source(self.SOURCE_TYPE, data.get("mainEntityOfPage", data["@id"]), audio_url=mp3_url))
+
+        source_list_album: List[Source] = [source]
+        if "@id" in album_data:
+            source_list_album.append(Source(self.SOURCE_TYPE, album_data["@id"]))
+
+        source_list_artist: List[Source] = [source]
+        if "@id" in artist_data:
+            source_list_artist.append(Source(self.SOURCE_TYPE, _parse_artist_url(artist_data["@id"])))
+
         song = Song(
             title=clean_song_title(data["name"], artist_name=artist_data["name"]),
-            source_list=[source, Source(self.SOURCE_TYPE, data.get("mainEntityOfPage", data["@id"]), audio_url=mp3_url)],
+            source_list=source_list,
             album_list=[Album(
                 title=album_data["name"].strip(),
                 date=ID3Timestamp.strptime(data["datePublished"], "%d %b %Y %H:%M:%S %Z"),
-                source_list=[Source(self.SOURCE_TYPE, album_data["@id"])]
+                source_list=source_list_album
            )],
            artist_list=[Artist(
                name=artist_data["name"].strip(),
-                source_list=[Source(self.SOURCE_TYPE, _parse_artist_url(artist_data["@id"]))]
+                source_list=source_list_artist
            )],
            lyrics_list=self._fetch_lyrics(soup=soup)
        )

View File

@@ -1,33 +1,23 @@
-from typing import List, Optional, Type
-from urllib.parse import urlparse, urlunparse, urlencode
-import json
+import simplejson as json
+from json_unescape import escape_json, unescape_json
+
 from enum import Enum
-from bs4 import BeautifulSoup
-import pycountry
+from typing import List, Optional, Type
+from urllib.parse import urlencode, urlparse, urlunparse
 
-from ..objects import Source, DatabaseObject
-from .abstract import Page
-from ..objects import (
-    Artist,
-    Source,
-    SourceType,
-    Song,
-    Album,
-    Label,
-    Target,
-    Contact,
-    ID3Timestamp,
-    Lyrics,
-    FormattedText,
-    Artwork,
-)
+import pycountry
+from bs4 import BeautifulSoup
+
 from ..connection import Connection
+from ..objects import (Album, Artist, ArtworkCollection, Contact,
+                       DatabaseObject, FormattedText, ID3Timestamp, Label,
+                       Lyrics, Song, Source, SourceType, Target)
 from ..utils import dump_to_file, traverse_json_path
-from ..utils.enums import SourceType, ALL_SOURCE_TYPES
-from ..utils.support_classes.download_result import DownloadResult
-from ..utils.string_processing import clean_song_title
-from ..utils.config import main_settings, logging_settings
+from ..utils.config import logging_settings, main_settings
+from ..utils.enums import ALL_SOURCE_TYPES, SourceType
 from ..utils.shared import DEBUG
+from ..utils.string_processing import clean_song_title
+from ..utils.support_classes.download_result import DownloadResult
+from .abstract import Page
 
 if DEBUG:
     from ..utils import dump_to_file
@@ -56,34 +46,34 @@ class Genius(Page):
         return Song
 
-    def add_to_artwork(self, artwork: Artwork, url: str):
+    def add_to_artwork(self, artwork: ArtworkCollection, url: str):
         if url is None:
             return
 
         url_frags = url.split(".")
         if len(url_frags) < 2:
-            artwork.append(url=url)
+            artwork.add_data(url=url)
             return
 
         dimensions = url_frags[-2].split("x")
         if len(dimensions) < 2:
-            artwork.append(url=url)
+            artwork.add_data(url=url)
            return
 
        if len(dimensions) == 3:
            dimensions = dimensions[:-1]
 
        try:
-            artwork.append(url=url, width=int(dimensions[0]), height=int(dimensions[1]))
+            artwork.add_data(url=url, width=int(dimensions[0]), height=int(dimensions[1]))
        except ValueError:
-            artwork.append(url=url)
+            artwork.add_data(url=url)
 
    def parse_api_object(self, data: dict) -> Optional[DatabaseObject]:
        if data is None:
            return None
 
        object_type = data.get("_type")
 
-        artwork = Artwork()
+        artwork = ArtworkCollection()
        self.add_to_artwork(artwork, data.get("header_image_url"))
        self.add_to_artwork(artwork, data.get("image_url"))
@@ -134,7 +124,7 @@ class Genius(Page):
             source_list=[source],
             artist_list=[self.parse_api_object(data.get("artist"))],
             artwork=artwork,
-            date=ID3Timestamp(**data.get("release_date_components", {})),
+            date=ID3Timestamp(**(data.get("release_date_components") or {})),
         )
 
         if object_type == "song":
@@ -279,7 +269,8 @@ class Genius(Page):
         # get the contents that are between `JSON.parse('` and `');`
         content = self.get_json_content_from_response(r, start="window.__PRELOADED_STATE__ = JSON.parse('", end="');\n window.__APP_CONFIG__ = ")
         if content is not None:
-            content = content.replace("\\\\", "\\").replace('\\"', '"').replace("\\'", "'")
+            #IMPLEMENT FIX FROM HAZEL
+            content = escape_json(content)
             data = json.loads(content)
 
             lyrics_html = traverse_json_path(data, "songPage.lyricsData.body.html", default=None)

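The add_to_artwork change keeps Genius' width/height guess: the second-to-last dot-separated fragment of an image URL usually encodes the dimensions. A self-contained restatement of that heuristic (the example URL is illustrative, not taken from the API):

```
from typing import Optional, Tuple

def dimensions_from_url(url: str) -> Optional[Tuple[int, int]]:
    frags = url.split(".")
    if len(frags) < 2:
        return None

    dims = frags[-2].split("x")
    if len(dims) == 3:      # trailing scale factor, e.g. 300x300x1
        dims = dims[:-1]

    try:
        return int(dims[0]), int(dims[1])
    except (ValueError, IndexError):
        return None

print(dimensions_from_url("https://images.example.com/cover.300x300x1.jpg"))  # (300, 300)
```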
View File

@ -1,34 +1,25 @@
from collections import defaultdict from collections import defaultdict
from dataclasses import dataclass from dataclasses import dataclass
from enum import Enum from enum import Enum
from typing import List, Optional, Type, Union, Generator, Dict, Any from typing import Any, Dict, Generator, List, Optional, Type, Union
from urllib.parse import urlparse from urllib.parse import urlparse
import pycountry import pycountry
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
from ..connection import Connection from ..connection import Connection
from .abstract import Page from ..objects import (Album, Artist, DatabaseObject,
from ..utils.enums import SourceType, ALL_SOURCE_TYPES FormattedText, ID3Timestamp, Label, Lyrics, Song,
from ..utils.enums.album import AlbumType, AlbumStatus Source, Target)
from ..objects import ( from ..objects.artwork import (Artwork, ArtworkVariant, ArtworkCollection)
Artist, from ..utils import shared, string_processing
Source,
Song,
Album,
ID3Timestamp,
FormattedText,
Label,
Target,
DatabaseObject,
Lyrics,
Artwork
)
from ..utils.config import logging_settings, main_settings from ..utils.config import logging_settings, main_settings
from ..utils import string_processing, shared from ..utils.enums import ALL_SOURCE_TYPES, SourceType
from ..utils.enums.album import AlbumStatus, AlbumType
from ..utils.string_processing import clean_song_title from ..utils.string_processing import clean_song_title
from ..utils.support_classes.query import Query
from ..utils.support_classes.download_result import DownloadResult from ..utils.support_classes.download_result import DownloadResult
from ..utils.support_classes.query import Query
from .abstract import Page
""" """
https://musify.club/artist/ghost-bath-280348?_pjax=#bodyContent https://musify.club/artist/ghost-bath-280348?_pjax=#bodyContent
@@ -457,17 +448,17 @@ class Musify(Page):
         for album_info in soup.find_all("ul", {"class": "album-info"}):
             list_element: BeautifulSoup = album_info.find("li")
             if list_element is not None:
                 artist_soup: BeautifulSoup
                 for artist_soup in list_element.find_all("a"):
                     artist_source_list = []
                     href = artist_soup["href"]
                     if href is not None:
                         artist_source_list = [Source(self.SOURCE_TYPE, self.HOST + href)]
                     artist_list.append(Artist(
                         name=artist_soup.text.strip(),
                         source_list=artist_source_list
                     ))
         # breadcrumbs
         breadcrumb_list_element_list: List[BeautifulSoup] = soup.find_all("ol", {"class": "breadcrumb"})
@@ -485,11 +476,11 @@ class Musify(Page):
         track_name = list_points[4].text.strip()
-        # artwork
-        artwork: Artwork = Artwork()
+        # album artwork
+        artwork: ArtworkCollection = ArtworkCollection()
         album_image_element_list: List[BeautifulSoup] = soup.find_all("img", {"class": "album-img"})
         for album_image_element in album_image_element_list:
-            artwork.append(url=album_image_element.get("data-src", album_image_element.get("src")))
+            artwork.add_data(url=album_image_element.get("data-src", album_image_element.get("src")))
         # lyrics
         lyrics_container: List[BeautifulSoup] = soup.find_all("div", {"id": "tabLyrics"})
@@ -754,11 +745,18 @@ class Musify(Page):
         except ValueError:
             self.LOGGER.debug(f"Raw datetime doesn't match time format %Y-%m-%d: {raw_datetime}")
+        # album artwork
+        album_artwork: ArtworkCollection = ArtworkCollection()
+        album_artwork_list: List[BeautifulSoup] = soup.find_all("img", {"class": "artist-img"})
+        for album_artwork_element in album_artwork_list:
+            album_artwork.add_data(url=album_artwork_element.get("data-src", album_artwork_element.get("src")))
         return Album(
             title=name,
             source_list=source_list,
             artist_list=artist_list,
-            date=date
+            date=date,
+            artwork=album_artwork
         )
     def fetch_album(self, source: Source, stop_at_level: int = 1) -> Album:
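The `element.get("data-src", element.get("src"))` pattern above repeats for album, artist and gallery images, presumably because musify lazy-loads images and keeps the real URL in data-src while src may only hold a placeholder. A small helper in this spirit (name illustrative, not part of the diff) would factor that out:

from typing import Optional
from bs4 import Tag

def extract_image_url(image_element: Tag) -> Optional[str]:
    # Prefer the lazy-loading attribute and fall back to the plain src.
    return image_element.get("data-src") or image_element.get("src")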
@@ -795,6 +793,8 @@ class Musify(Page):
             new_song = self._parse_song_card(card_soup)
             album.song_collection.append(new_song)
         album.update_tracksort()
         return album
@@ -914,11 +914,18 @@ class Musify(Page):
         if note_soup is not None:
             notes.html = note_soup.decode_contents()
+        # get artist profile artwork
+        main_artist_artwork: ArtworkCollection = ArtworkCollection()
+        artist_image_element_list: List[BeautifulSoup] = soup.find_all("img", {"class": "artist-img"})
+        for artist_image_element in artist_image_element_list:
+            main_artist_artwork.add_data(url=artist_image_element.get("data-src", artist_image_element.get("src")))
         return Artist(
             name=name,
             country=country,
             source_list=source_list,
-            notes=notes
+            notes=notes,
+            artwork=main_artist_artwork
         )
     def _parse_album_card(self, album_card: BeautifulSoup, artist_name: str = None, **kwargs) -> Album:
@@ -1056,19 +1063,29 @@ class Musify(Page):
             artist.album_collection.append(album)
+    def _fetch_artist_artwork(self, source: str, artist: Artist, **kwargs):
+        # artist artwork
+        artwork_gallery = self.get_soup_from_response(self.connection.get(source.strip().strip("/") + "/photos"))
+        if artwork_gallery is not None:
+            gallery_body_content: BeautifulSoup = artwork_gallery.find(id="bodyContent")
+            gallery_image_element_list: List[BeautifulSoup] = gallery_body_content.find_all("img")
+            for gallery_image_element in gallery_image_element_list:
+                artist.artwork.append(ArtworkVariant(url=gallery_image_element.get("data-src", gallery_image_element.get("src")), width=247, height=247))
     def fetch_artist(self, source: Source, **kwargs) -> Artist:
         """
         TODO
         [x] discography
         [x] attributes
-        [] picture gallery
+        [x] picture gallery
         """
         url = parse_url(source.url)
         artist = self._fetch_initial_artist(url, source=source, **kwargs)
         self._fetch_artist_discography(artist, url, artist.name, **kwargs)
+        self._fetch_artist_artwork(url.url, artist, **kwargs)
         return artist
     def fetch_label(self, source: Source, stop_at_level: int = 1) -> Label:
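For orientation, the "/photos" request in _fetch_artist_artwork is built by trimming whitespace and a trailing slash from the artist URL before appending the gallery path, which avoids accidental double slashes. Spelled out with the artist from the module docstring (query parameters dropped, values illustrative):

def gallery_url(artist_url: str) -> str:
    # Mirrors the construction used in _fetch_artist_artwork above.
    return artist_url.strip().strip("/") + "/photos"

print(gallery_url("https://musify.club/artist/ghost-bath-280348/"))
# https://musify.club/artist/ghost-bath-280348/photos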

View File

@@ -1,46 +1,33 @@
-from __future__ import unicode_literals, annotations
-from typing import Dict, List, Optional, Set, Type
-from urllib.parse import urlparse, urlunparse, quote, parse_qs, urlencode
+from __future__ import annotations, unicode_literals
+import json
 import logging
 import random
-import json
-from dataclasses import dataclass
 import re
-from functools import lru_cache
 from collections import defaultdict
+from dataclasses import dataclass
+from functools import lru_cache
+from typing import Dict, List, Optional, Set, Type
+from urllib.parse import parse_qs, quote, urlencode, urlparse, urlunparse
 import youtube_dl
 from youtube_dl.extractor.youtube import YoutubeIE
 from youtube_dl.utils import DownloadError
+from ...connection import Connection
+from ...objects import Album, Artist, ArtworkCollection
+from ...objects import DatabaseObject as DataObject
+from ...objects import (FormattedText, ID3Timestamp, Label, Lyrics, Song,
+                        Source, Target)
+from ...utils import dump_to_file, get_current_millis, traverse_json_path
+from ...utils.config import logging_settings, main_settings, youtube_settings
+from ...utils.enums import ALL_SOURCE_TYPES, SourceType
+from ...utils.enums.album import AlbumType
 from ...utils.exception.config import SettingValueError
-from ...utils.config import main_settings, youtube_settings, logging_settings
 from ...utils.shared import DEBUG, DEBUG_YOUTUBE_INITIALIZING
 from ...utils.string_processing import clean_song_title
-from ...utils import get_current_millis, traverse_json_path
-from ...utils import dump_to_file
-from ..abstract import Page
-from ...objects import (
-    DatabaseObject as DataObject,
-    Source,
-    FormattedText,
-    ID3Timestamp,
-    Artwork,
-    Artist,
-    Song,
-    Album,
-    Label,
-    Target,
-    Lyrics,
-)
-from ...connection import Connection
-from ...utils.enums import SourceType, ALL_SOURCE_TYPES
-from ...utils.enums.album import AlbumType
 from ...utils.support_classes.download_result import DownloadResult
+from ..abstract import Page
 from ._list_render import parse_renderer
 from ._music_object_render import parse_run_element
 from .super_youtube import SuperYouTube
@@ -438,6 +425,7 @@ class YoutubeMusic(SuperYouTube):
         data: dict = r.json()
         header = data.get("header", {})
         musicDetailHeaderRenderer = header.get("musicDetailHeaderRenderer", {})
+        musicImmersiveHeaderRenderer = header.get("musicImmersiveHeaderRenderer", {})
         title_runs: List[dict] = musicDetailHeaderRenderer.get("title", {}).get("runs", [])
         subtitle_runs: List[dict] = musicDetailHeaderRenderer.get("subtitle", {}).get("runs", [])
@@ -450,6 +438,11 @@ class YoutubeMusic(SuperYouTube):
         renderer_list = r.json().get("contents", {}).get("singleColumnBrowseResultsRenderer", {}).get("tabs", [{}])[
             0].get("tabRenderer", {}).get("content", {}).get("sectionListRenderer", {}).get("contents", [])
+        # fetch artist artwork
+        artist_thumbnails = musicImmersiveHeaderRenderer.get("thumbnail", {}).get("musicThumbnailRenderer", {}).get("thumbnail", {}).get("thumbnails", {})
+        for artist_thumbnail in artist_thumbnails:
+            artist.artwork.append(artist_thumbnail)
         if DEBUG:
             for i, content in enumerate(renderer_list):
                 dump_to_file(f"{i}-artists-renderer.json", json.dumps(content), is_json=True, exit_after_dump=False)
@@ -497,6 +490,11 @@ class YoutubeMusic(SuperYouTube):
         header = data.get("header", {})
         musicDetailHeaderRenderer = header.get("musicDetailHeaderRenderer", {})
+        # album artwork
+        album_thumbnails = musicDetailHeaderRenderer.get("thumbnail", {}).get("croppedSquareThumbnailRenderer", {}).get("thumbnail", {}).get("thumbnails", {})
+        for album_thumbnail in album_thumbnails:
+            album.artwork.append(value=album_thumbnail)
         title_runs: List[dict] = musicDetailHeaderRenderer.get("title", {}).get("runs", [])
         subtitle_runs: List[dict] = musicDetailHeaderRenderer.get("subtitle", {}).get("runs", [])
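The thumbnail lists pulled out of musicImmersiveHeaderRenderer and musicDetailHeaderRenderer are, as far as the rest of this diff relies on (song.artwork.add_data(**thumbnail) further down), plain dicts carrying url, width and height. A representative entry, shown only to make the assumed shape explicit:

# Illustrative shape only – the exact values come from YouTube's response.
artist_thumbnail = {
    "url": "https://lh3.googleusercontent.com/…",
    "width": 540,
    "height": 225,
}
# Because the keys line up with the collection's parameters, entries can be
# passed straight through, e.g. artwork.add_data(**artist_thumbnail).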
@@ -549,6 +547,11 @@ class YoutubeMusic(SuperYouTube):
         return album
     def fetch_lyrics(self, video_id: str, playlist_id: str = None) -> str:
+        """
+        1. fetches the tabs of a song, to get the browse id
+        2. finds the browse id of the lyrics
+        3. fetches the lyrics with the browse id
+        """
         request_data = {
             "context": {**self.credentials.context, "adSignalsInfo": {"params": []}},
             "videoId": video_id,
@@ -575,7 +578,8 @@ class YoutubeMusic(SuperYouTube):
             pageType = traverse_json_path(tab, "tabRenderer.endpoint.browseEndpoint.browseEndpointContextSupportedConfigs.browseEndpointContextMusicConfig.pageType", default="")
             if pageType in ("MUSIC_TAB_TYPE_LYRICS", "MUSIC_PAGE_TYPE_TRACK_LYRICS") or "lyrics" in pageType.lower():
                 browse_id = traverse_json_path(tab, "tabRenderer.endpoint.browseEndpoint.browseId", default=None)
-                break
+                if browse_id is not None:
+                    break
         if browse_id is None:
             return None
@@ -640,7 +644,7 @@ class YoutubeMusic(SuperYouTube):
             note=ydl_res.get("descriptions"),
             album_list=album_list,
             length=int(ydl_res.get("duration", 0)) * 1000,
-            artwork=Artwork(*ydl_res.get("thumbnails", [])),
+            artwork=ArtworkCollection(*ydl_res.get("thumbnails", [])),
             artist_list=artist_list,
             source_list=[Source(
                 self.SOURCE_TYPE,
@@ -679,7 +683,7 @@ class YoutubeMusic(SuperYouTube):
         for album in song.album_list:
             album.album_type = AlbumType.LIVE_ALBUM
         for thumbnail in video_details.get("thumbnails", []):
-            song.artwork.append(**thumbnail)
+            song.artwork.add_data(**thumbnail)
         song.lyrics_collection.append(self.fetch_lyrics(browse_id, playlist_id=request_data.get("playlistId")))
@@ -721,7 +725,6 @@ class YoutubeMusic(SuperYouTube):
         self.download_values_by_url[source.url] = {
             "url": _best_format.get("url"),
-            "chunk_size": _best_format.get("downloader_options", {}).get("http_chunk_size", main_settings["chunk_size"]),
             "headers": _best_format.get("http_headers", {}),
         }

View File

@@ -1,15 +1,18 @@
+import inspect
 import json
 import logging
-from datetime import datetime
-from pathlib import Path
-import inspect
-from typing import List, Union
+from datetime import datetime
+from functools import lru_cache
+from pathlib import Path
+from typing import Any, List, Union
-from .shared import DEBUG, DEBUG_LOGGING, DEBUG_DUMP, DEBUG_TRACE, DEBUG_OBJECT_TRACE, DEBUG_OBJECT_TRACE_CALLSTACK
 from .config import config, read_config, write_config
 from .enums.colors import BColors
-from .path_manager import LOCATIONS
 from .hacking import merge_args
+from .path_manager import LOCATIONS
+from .shared import (DEBUG, DEBUG_DUMP, DEBUG_LOGGING, DEBUG_OBJECT_TRACE,
+                     DEBUG_OBJECT_TRACE_CALLSTACK, DEBUG_TRACE, URL_PATTERN)
+from .string_processing import hash_url, is_url, unify
 """
 IO functions
@@ -126,3 +129,34 @@ def get_current_millis() -> int:
 def get_unix_time() -> int:
     return int(datetime.now().timestamp())
+@lru_cache
+def custom_hash(value: Any) -> int:
+    if is_url(value):
+        value = hash_url(value)
+    elif isinstance(value, str):
+        try:
+            value = int(value)
+        except ValueError:
+            value = unify(value)
+    return hash(value)
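What the branches above buy, concretely: numeric strings hash like the number they name, URLs are normalised through hash_url first, and every other string goes through unify. hash_url and unify are not part of this diff, so the comments about them below are assumptions; only the int branch is fully determined by the code shown:

# "42" is not a URL, so it falls into the int() branch and hashes like 42.
assert custom_hash("42") == custom_hash(42)

# URLs are first normalised by hash_url (not shown here), presumably so that
# trivially different spellings of the same link collide on purpose.
print(custom_hash("https://musify.club/artist/ghost-bath-280348"))

# Any other string is passed through unify (also not shown), which likely
# strips case/whitespace differences before hashing.
print(custom_hash("Ghost Bath"))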
+def create_dataclass_instance(t, data: dict):
+    """Creates an instance of a dataclass with the given data.
+    It filters out all data keys that have no matching field in the dataclass.
+    Args:
+        t (Type): the dataclass type
+        data (dict): the attributes to pass into the constructor
+    Returns:
+        Tuple[Type, dict]: the created instance and a dict containing the data that was not used in the creation
+    """
+    needed_data = {k: v for k, v in data.items() if k in t.__dataclass_fields__}
+    removed_data = {k: v for k, v in data.items() if k not in t.__dataclass_fields__}
+    return t(**needed_data), removed_data
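A self-contained example of the helper above, using a stand-in dataclass (the field names mirror the artwork data handled elsewhere in this diff, but the class itself is purely illustrative):

from dataclasses import dataclass
from typing import Optional

@dataclass
class ImageVariant:  # illustrative stand-in, not the project's ArtworkVariant
    url: str
    width: Optional[int] = None
    height: Optional[int] = None

instance, leftover = create_dataclass_instance(
    ImageVariant,
    {"url": "https://example.com/cover.jpg", "width": 1000, "preference": 5},
)
print(instance)  # ImageVariant(url='https://example.com/cover.jpg', width=1000, height=None)
print(leftover)  # {'preference': 5} – keys without a matching field are handed back untouched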

View File

@@ -1,11 +1,8 @@
 from typing import Tuple
 from .config import Config
-from .config_files import (
-    main_config,
-    logging_config,
-    youtube_config,
-)
+from .config_files import main_config, logging_config, youtube_config
 _sections: Tuple[Config, ...] = (
     main_config.config,
View File

@@ -18,6 +18,7 @@ config = Config((
     AudioFormatAttribute(name="audio_format", default_value="mp3", description="""Music Kraken will stream the audio into this format.
 You can use Audio formats which support ID3.2 and ID3.1,
 but you will have cleaner Metadata using ID3.2."""),
+    Attribute(name="image_format", default_value="jpeg", description="This changes the format in which images are downloaded."),
     Attribute(name="result_history", default_value=True, description="""If enabled, you can go back to the previous results.
 The consequence is a higher memory consumption, because every result is saved."""),
@@ -28,6 +29,7 @@ The further you choose to be able to go back, the higher the memory usage.
     EmptyLine(),
     Attribute(name="preferred_artwork_resolution", default_value=1000),
+    Attribute(name="download_artist_artworks", default_value=True, description="Enables the fetching of artist galleries."),
     EmptyLine(),
@@ -44,6 +46,7 @@ This means for example, the Studio Albums and EP's are always in front of Single
 - album_type
 The folder music kraken should put the songs into."""),
     Attribute(name="download_file", default_value="{song}.{audio_format}", description="The filename of the audio file."),
+    Attribute(name="artist_artwork_path", default_value="{genre}/{artist}/{artist}_{image_number}.{image_format}", description="The path to download artist images to."),
     SelectAttribute(name="album_type_blacklist", default_value=[
         "Compilation Album",
         "Live Album",
@@ -152,10 +155,13 @@ class SettingsStructure(TypedDict):
     # artwork
     preferred_artwork_resolution: int
+    image_format: str
+    download_artist_artworks: bool
     # paths
     music_directory: Path
     temp_directory: Path
+    artist_artwork_path: Path
     log_file: Path
     not_a_genre_regex: List[str]
     ffmpeg_binary: Path
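The new artist_artwork_path default is a plain placeholder template; however the project ultimately renders it, filling the placeholders produces paths of the following shape (the concrete values are illustrative, only image_format matches the new default):

artist_artwork_path = "{genre}/{artist}/{artist}_{image_number}.{image_format}"

print(artist_artwork_path.format(
    genre="Black Metal",      # illustrative values, not from the diff
    artist="Ghost Bath",
    image_number=3,
    image_format="jpeg",      # matches the new image_format default
))
# Black Metal/Ghost Bath/Ghost Bath_3.jpeg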

View File

@@ -1,7 +1,11 @@
 from __future__ import annotations
 from dataclasses import dataclass
-from typing import Optional, TYPE_CHECKING, Type
+from enum import Enum
+from typing import TYPE_CHECKING, Optional, Type
+from mutagen.id3 import PictureType
 if TYPE_CHECKING:
     from ...pages.abstract import Page
@@ -52,3 +56,73 @@ class ALL_SOURCE_TYPES:
     MANUAL = SourceType(name="manual")
     PRESET = SourceType(name="preset")
+class PictureType(Enum):
+    """Enumeration of image types defined by the ID3 standard for the APIC
+    frame, but also reused in WMA/FLAC/VorbisComment.
+    This is copied from mutagen.id3.PictureType
+    """
+    OTHER = 0
+    FILE_ICON = 1
+    """32x32 pixels 'file icon' (PNG only)"""
+    OTHER_FILE_ICON = 2
+    """Other file icon"""
+    COVER_FRONT = 3
+    """Cover (front)"""
+    COVER_BACK = 4
+    """Cover (back)"""
+    LEAFLET_PAGE = 5
+    """Leaflet page"""
+    MEDIA = 6
+    """Media (e.g. label side of CD)"""
+    LEAD_ARTIST = 7
+    """Lead artist/lead performer/soloist"""
+    ARTIST = 8
+    """Artist/performer"""
+    CONDUCTOR = 9
+    """Conductor"""
+    BAND = 10
+    """Band/Orchestra"""
+    COMPOSER = 11
+    """Composer"""
+    LYRICIST = 12
+    """Lyricist/text writer"""
+    RECORDING_LOCATION = 13
+    """Recording Location"""
+    DURING_RECORDING = 14
+    """During recording"""
+    DURING_PERFORMANCE = 15
+    """During performance"""
+    SCREEN_CAPTURE = 16
+    """Movie/video screen capture"""
+    FISH = 17
+    """A bright colored fish"""
+    ILLUSTRATION = 18
+    """Illustration"""
+    BAND_LOGOTYPE = 19
+    """Band/artist logotype"""
+    PUBLISHER_LOGOTYPE = 20
+    """Publisher/Studio logotype"""

View File

@@ -1,13 +1,15 @@
+import re
 import string
 from functools import lru_cache
-from typing import Tuple, Union, Optional
-from pathlib import Path
+from pathlib import Path
+from typing import Any, Optional, Tuple, Union
+from urllib.parse import ParseResult, parse_qs, urlparse
-from transliterate.exceptions import LanguageDetectionError
-from transliterate import translit
 from pathvalidate import sanitize_filename
-from urllib.parse import urlparse, ParseResult, parse_qs
+from transliterate import translit
+from transliterate.exceptions import LanguageDetectionError
+from .shared import URL_PATTERN
 COMMON_TITLE_APPENDIX_LIST: Tuple[str, ...] = (
     "(official video)",
@@ -229,3 +231,13 @@ def shorten_display_url(url: str, max_length: int = 150, chars_at_end: int = 4,
         return url
     return url[:max_length] + shorten_string + url[-chars_at_end:]
+def is_url(value: Any) -> bool:
+    if isinstance(value, ParseResult):
+        return True
+    if not isinstance(value, str):
+        return False
+    # value has to be a string
+    return re.match(URL_PATTERN, value) is not None
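How the three branches of is_url behave in practice; URL_PATTERN comes from .shared and is not shown in this diff, so the two string cases assume it matches absolute http(s) URLs and nothing else:

from urllib.parse import urlparse

print(is_url(urlparse("https://musify.club/artist/ghost-bath-280348")))  # True – ParseResult short-circuits
print(is_url("https://musify.club/artist/ghost-bath-280348"))            # True, assuming URL_PATTERN matches it
print(is_url(42))            # False – neither a string nor a ParseResult
print(is_url("Ghost Bath"))  # False, assuming plain text does not match URL_PATTERN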

View File

@@ -1,9 +1,13 @@
+from __future__ import annotations
 from dataclasses import dataclass, field
-from typing import List, Tuple
-from ...utils.config import main_settings, logging_settings
+from typing import TYPE_CHECKING, List, Tuple
+if TYPE_CHECKING:
+    from ...objects import Target
+from ...utils.config import logging_settings, main_settings
 from ...utils.enums.colors import BColors
-from ...objects import Target
 UNIT_PREFIXES: List[str] = ["", "k", "m", "g", "t"]
 UNIT_DIVISOR = 1024