@@ -1,255 +0,0 @@
-import logging
-import re
-from collections import defaultdict
-from pathlib import Path
-from typing import Dict, List, Optional, Set, Tuple, Type
-
-from ..audio import correct_codec, write_metadata_to_target
-from ..objects import Album, Artist, Collection
-from ..objects import DatabaseObject as DataObject
-from ..objects import Label, Options, Song, Source, Target
-from ..pages import get_pages, scan_for_pages
-from ..utils import BColors, output
-from ..utils.config import main_settings, youtube_settings
-from ..utils.enums import ALL_SOURCE_TYPES, SourceType
-from ..utils.exception import MKMissingNameException
-from ..utils.exception.download import UrlNotFoundException
-from ..utils.path_manager import LOCATIONS
-from ..utils.shared import DEBUG_PAGES
-from ..utils.string_processing import fit_to_file_system
-from ..utils.support_classes.download_result import DownloadResult
-from ..utils.support_classes.query import Query
-from . import DownloadOptions, FetchOptions
-from .results import SearchResults
-
-fetch_map = {
-    Song: "fetch_song",
-    Album: "fetch_album",
-    Artist: "fetch_artist",
-    Label: "fetch_label",
-}
-
-
-class Pages:
-    def __init__(self, download_options: DownloadOptions = None, fetch_options: FetchOptions = None, **kwargs):
-        self.LOGGER = logging.getLogger("download")
-
-        self.download_options: DownloadOptions = download_options or DownloadOptions()
-        self.fetch_options: FetchOptions = fetch_options or FetchOptions()
-
-        scan_for_pages(download_options=self.download_options, fetch_options=self.fetch_options, **kwargs)
-
-    def search(self, query: Query) -> SearchResults:
-        result = SearchResults()
-
-        for page in get_pages():
-            result.add(
-                page=type(page),
-                search_result=page.search(query=query)
-            )
-
-        return result
-
-    def fetch_details(self, data_object: DataObject, stop_at_level: int = 1, **kwargs) -> DataObject:
-        source: Source
-        for source in data_object.source_collection.get_sources(source_type_sorting={
-            "only_with_page": True,
-        }):
-            new_data_object = self.fetch_from_source(source=source, stop_at_level=stop_at_level)
-            if new_data_object is not None:
-                data_object.merge(new_data_object)
-
-        return data_object
-
-    def fetch_from_source(self, source: Source, **kwargs) -> Optional[DataObject]:
-        if not source.has_page:
-            return None
-
-        source_type = source.page.get_source_type(source=source)
-        if source_type is None:
-            self.LOGGER.debug(f"Could not determine source type for {source}.")
-            return None
-
-        func = getattr(source.page, fetch_map[source_type])
-
-        # fetching the data object and marking it as fetched
-        data_object: DataObject = func(source=source, **kwargs)
-        data_object.mark_as_fetched(source.hash_url)
-        return data_object
-
-    def fetch_from_url(self, url: str) -> Optional[DataObject]:
-        source = Source.match_url(url, ALL_SOURCE_TYPES.MANUAL)
-        if source is None:
-            return None
-
-        return self.fetch_from_source(source=source)
-
-    def _skip_object(self, data_object: DataObject) -> bool:
-        if isinstance(data_object, Album):
-            if not self.download_options.download_all and data_object.album_type in self.download_options.album_type_blacklist:
-                return True
-
-        return False
-
-    def download(self, data_object: DataObject, genre: str, **kwargs) -> DownloadResult:
-        # fetch the given object
-        self.fetch_details(data_object)
-        output(f"\nDownloading {data_object.option_string}...", color=BColors.BOLD)
-
-        # fetching all parent objects (e.g. if you only download a song)
-        if not kwargs.get("fetched_upwards", False):
-            to_fetch: List[DataObject] = [data_object]
-
-            while len(to_fetch) > 0:
-                new_to_fetch = []
-                for d in to_fetch:
-                    if self._skip_object(d):
-                        continue
-
-                    self.fetch_details(d)
-
-                    for c in d.get_parent_collections():
-                        new_to_fetch.extend(c)
-
-                to_fetch = new_to_fetch
-
-            kwargs["fetched_upwards"] = True
-
-        # download all children
-        download_result: DownloadResult = DownloadResult()
-        for c in data_object.get_child_collections():
-            for d in c:
-                if self._skip_object(d):
-                    continue
-
-                download_result.merge(self.download(d, genre, **kwargs))
-
-        # actually download if the object is a song
-        if isinstance(data_object, Song):
-            """
-            TODO
-            add the traced artist and album to the naming.
-            I am able to do that, because duplicate values are removed later on.
-            """
-
-            self._download_song(data_object, naming={
-                "genre": [genre],
-                "audio_format": [main_settings["audio_format"]],
-            })
-
-        return download_result
-
-    def _extract_fields_from_template(self, path_template: str) -> Set[str]:
-        return set(re.findall(r"{([^}]+)}", path_template))
-
-    def _parse_path_template(self, path_template: str, naming: Dict[str, List[str]]) -> str:
-        field_names: Set[str] = self._extract_fields_from_template(path_template)
-
-        for field in field_names:
-            if len(naming[field]) == 0:
-                raise MKMissingNameException(f"Missing field for {field}.")
-
-            path_template = path_template.replace(f"{{{field}}}", naming[field][0])
-
-        return path_template
-
-    def _download_song(self, song: Song, naming: dict) -> DownloadResult:
-        """
-        TODO
-        Search the song in the file system.
-        """
-        r = DownloadResult(total=1)
-
-        # pre process the data recursively
-        song.compile()
-
-        # manage the naming
-        naming: Dict[str, List[str]] = defaultdict(list, naming)
-        naming["song"].append(song.title_value)
-        naming["isrc"].append(song.isrc)
-        naming["album"].extend(a.title_value for a in song.album_collection)
-        naming["album_type"].extend(a.album_type.value for a in song.album_collection)
-        naming["artist"].extend(a.name for a in song.artist_collection)
-        naming["artist"].extend(a.name for a in song.feature_artist_collection)
-        for a in song.album_collection:
-            naming["label"].extend([l.title_value for l in a.label_collection])
-        # removing duplicates from the naming, and process the strings
-        for key, value in naming.items():
-            # https://stackoverflow.com/a/17016257
-            naming[key] = list(dict.fromkeys(value))
-        song.genre = naming["genre"][0]
-
-        # manage the targets
-        tmp: Target = Target.temp(file_extension=main_settings["audio_format"])
-
-        song.target_collection.append(Target(
-            relative_to_music_dir=True,
-            file_path=Path(
-                self._parse_path_template(main_settings["download_path"], naming=naming),
-                self._parse_path_template(main_settings["download_file"], naming=naming),
-            )
-        ))
-        for target in song.target_collection:
-            if target.exists:
-                output(f'{target.file_path} {BColors.OKGREEN.value}[already exists]', color=BColors.GREY)
-                r.found_on_disk += 1
-
-                if not self.download_options.download_again_if_found:
-                    target.copy_content(tmp)
-            else:
-                target.create_path()
-                output(f'{target.file_path}', color=BColors.GREY)
-
-        # this streams from every available source until something succeeds, setting the skip intervals to the values of the according source
-        used_source: Optional[Source] = None
-        skip_intervals: List[Tuple[float, float]] = []
-        for source in song.source_collection.get_sources(source_type_sorting={
-            "only_with_page": True,
-            "sort_key": lambda page: page.download_priority,
-            "reverse": True,
-        }):
-            if tmp.exists:
-                break
-
-            used_source = source
-            streaming_results = source.page.download_song_to_target(source=source, target=tmp, desc="download")
-            skip_intervals = source.page.get_skip_intervals(song=song, source=source)
-
-            # if something has been downloaded but it somehow failed, delete the file
-            if streaming_results.is_fatal_error and tmp.exists:
-                tmp.delete()
-
-        # if everything went right, the file should exist now
-        if not tmp.exists:
-            if used_source is None:
-                r.error_message = f"No source found for {song.option_string}."
-            else:
-                r.error_message = f"Something went wrong downloading {song.option_string}."
-            return r
-
-        # post process the audio
-        found_on_disk = used_source is None
-        if not found_on_disk or self.download_options.process_audio_if_found:
-            correct_codec(target=tmp, skip_intervals=skip_intervals)
-            r.sponsor_segments = len(skip_intervals)
-
-        if used_source is not None:
-            used_source.page.post_process_hook(song=song, temp_target=tmp)
-
-        if not found_on_disk or self.download_options.process_metadata_if_found:
-            write_metadata_to_target(metadata=song.metadata, target=tmp, song=song)
-
-        # copy the tmp target to the final locations
-        for target in song.target_collection:
-            tmp.copy_content(target)
-
-        tmp.delete()
-        return r
-
-    def fetch_url(self, url: str, **kwargs) -> DataObject:
-        source = Source.match_url(url, ALL_SOURCE_TYPES.MANUAL)
-
-        if source is None or source.page is None:
-            raise UrlNotFoundException(url=url)
-
-        return source.page.fetch_object_from_source(source=source, **kwargs)
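For context on what the removed naming logic did: _parse_path_template fills every {field} placeholder in the configured download_path / download_file templates with the first value collected for that field, and raises if no value exists. A minimal, self-contained sketch of that substitution (ValueError stands in for the project's MKMissingNameException; the template strings and values are invented examples):

import re
from collections import defaultdict
from typing import Dict, List

def parse_path_template(path_template: str, naming: Dict[str, List[str]]) -> str:
    # every "{field}" occurrence needs at least one collected value
    for field in set(re.findall(r"{([^}]+)}", path_template)):
        if len(naming[field]) == 0:
            raise ValueError(f"Missing field for {field}.")
        # the first value wins; duplicates were already removed upstream
        path_template = path_template.replace(f"{{{field}}}", naming[field][0])
    return path_template

naming = defaultdict(list, {
    "genre": ["Metal"],
    "artist": ["Ghost Bath"],
    "album": ["Example Album"],
    "song": ["Example Song"],
    "audio_format": ["mp3"],
})
print(parse_path_template("{genre}/{artist}/{album}", naming))   # Metal/Ghost Bath/Example Album
print(parse_path_template("{song}.{audio_format}", naming))      # Example Song.mp3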
@@ -1,8 +1,8 @@
-from typing import Tuple, Type, Dict, List, Generator, Union
 from dataclasses import dataclass
+from typing import Dict, Generator, List, Tuple, Type, Union
 
 from ..objects import DatabaseObject
-from ..pages import Page, EncyclopaediaMetallum, Musify
+from . import Page
 
 
 @dataclass
@@ -1,55 +1,9 @@
-from typing import Type, Generator, Set, Dict, List
 from collections import defaultdict
+from typing import Dict, Generator, List, Set, Type
 
+from ._bandcamp import Bandcamp
 from ._encyclopaedia_metallum import EncyclopaediaMetallum
+from ._genius import Genius
 from ._musify import Musify
 from ._youtube import YouTube
 from ._youtube_music import YoutubeMusic
-from ._bandcamp import Bandcamp
-from ._genius import Genius
-from ._abstract import Page, INDEPENDENT_DB_OBJECTS
-
-
-_registered_pages: Dict[Type[Page], Set[Page]] = defaultdict(set)
-
-
-def get_pages(*page_types: List[Type[Page]]) -> Generator[Page, None, None]:
-    if len(page_types) == 0:
-        page_types = _registered_pages.keys()
-
-    for page_type in page_types:
-        yield from _registered_pages[page_type]
-
-
-def register_page(page_type: Type[Page], **kwargs):
-    if page_type in _registered_pages:
-        return
-
-    _registered_pages[page_type].add(page_type(**kwargs))
-
-
-def deregister_page(page_type: Type[Page]):
-    if page_type not in _registered_pages:
-        return
-
-    for p in _registered_pages[page_type]:
-        p.__del__()
-    del _registered_pages[page_type]
-
-def scan_for_pages(**kwargs):
-    # assuming the wanted pages are the leaf classes of the interface
-    leaf_classes = []
-
-    _class_list = [Page]
-    while len(_class_list):
-        _class = _class_list.pop()
-        _class_subclasses = _class.__subclasses__()
-
-        if len(_class_subclasses) == 0:
-            if _class.REGISTER:
-                leaf_classes.append(_class)
-        else:
-            _class_list.extend(_class_subclasses)
-
-    for leaf_class in leaf_classes:
-        register_page(leaf_class, **kwargs)
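The removed scan_for_pages discovers concrete page implementations by walking Page.__subclasses__() down to the leaf classes and registering every leaf whose REGISTER flag is set. A self-contained sketch of that discovery walk, with a toy hierarchy standing in for the real Page interface:

def leaf_subclasses(base: type) -> list:
    # collect classes that have no subclasses of their own and opted in via REGISTER
    leaves, stack = [], [base]
    while stack:
        cls = stack.pop()
        subclasses = cls.__subclasses__()
        if not subclasses:
            if cls.REGISTER:
                leaves.append(cls)
        else:
            stack.extend(subclasses)
    return leaves


class Page:                       # toy stand-in for the real Page interface
    REGISTER = True

class Bandcamp(Page): ...
class YouTube(Page): ...
class YoutubeMusic(YouTube): ...  # only the leaf is registered, not the intermediate YouTube class

print([cls.__name__ for cls in leaf_subclasses(Page)])
# ['YoutubeMusic', 'Bandcamp'] -- the base class and non-leaf parents are skipped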
@@ -1,152 +0,0 @@
-from __future__ import annotations
-
-import logging
-import random
-import re
-from copy import copy
-from pathlib import Path
-from typing import Optional, Union, Type, Dict, Set, List, Tuple, TypedDict, TYPE_CHECKING
-from string import Formatter
-from dataclasses import dataclass, field
-
-import requests
-from bs4 import BeautifulSoup
-
-if TYPE_CHECKING:
-    from ..download.page_attributes import DownloadOptions, FetchOptions
-from ..connection import Connection
-from ..objects import (
-    Song,
-    Source,
-    Album,
-    Artist,
-    Target,
-    DatabaseObject,
-    Options,
-    Collection,
-    Label,
-)
-from ..utils.enums import SourceType
-from ..utils.enums.album import AlbumType
-from ..audio import write_metadata_to_target, correct_codec
-from ..utils.config import main_settings
-from ..utils.support_classes.query import Query
-from ..utils.support_classes.download_result import DownloadResult
-from ..utils.string_processing import fit_to_file_system
-from ..utils import trace, output, BColors
-
-INDEPENDENT_DB_OBJECTS = Union[Label, Album, Artist, Song]
-INDEPENDENT_DB_TYPES = Union[Type[Song], Type[Album], Type[Artist], Type[Label]]
-
-
-class Page:
-    REGISTER = True
-    SOURCE_TYPE: SourceType
-    LOGGER: logging.Logger
-
-    def __new__(cls, *args, **kwargs):
-        cls.LOGGER = logging.getLogger(cls.__name__)
-        return super().__new__(cls)
-
-    def __init__(self, download_options: DownloadOptions = None, fetch_options: FetchOptions = None, **kwargs):
-        self.SOURCE_TYPE.register_page(self)
-
-        self.download_options: DownloadOptions = download_options or DownloadOptions()
-        self.fetch_options: FetchOptions = fetch_options or FetchOptions()
-
-    def __del__(self):
-        self.SOURCE_TYPE.deregister_page()
-
-    def _search_regex(self, pattern, string, default=None, fatal=True, flags=0, group=None):
-        """
-        Perform a regex search on the given string, using a single or a list of
-        patterns returning the first matching group.
-        In case of failure return a default value or raise a WARNING or a
-        RegexNotFoundError, depending on fatal, specifying the field name.
-        """
-
-        if isinstance(pattern, str):
-            mobj = re.search(pattern, string, flags)
-        else:
-            for p in pattern:
-                mobj = re.search(p, string, flags)
-                if mobj:
-                    break
-
-        if mobj:
-            if group is None:
-                # return the first matching group
-                return next(g for g in mobj.groups() if g is not None)
-            elif isinstance(group, (list, tuple)):
-                return tuple(mobj.group(g) for g in group)
-            else:
-                return mobj.group(group)
-
-        return default
-
-    def get_source_type(self, source: Source) -> Optional[Type[DatabaseObject]]:
-        return None
-
-    def get_soup_from_response(self, r: requests.Response) -> BeautifulSoup:
-        return BeautifulSoup(r.content, "html.parser")
-
-    # to search stuff
-    def search(self, query: Query) -> List[DatabaseObject]:
-        music_object = query.music_object
-
-        search_functions = {
-            Song: self.song_search,
-            Album: self.album_search,
-            Artist: self.artist_search,
-            Label: self.label_search
-        }
-
-        if type(music_object) in search_functions:
-            r = search_functions[type(music_object)](music_object)
-            if r is not None and len(r) > 0:
-                return r
-
-        r = []
-        for default_query in query.default_search:
-            for single_option in self.general_search(default_query):
-                r.append(single_option)
-
-        return r
-
-    def general_search(self, search_query: str) -> List[DatabaseObject]:
-        return []
-
-    def label_search(self, label: Label) -> List[Label]:
-        return []
-
-    def artist_search(self, artist: Artist) -> List[Artist]:
-        return []
-
-    def album_search(self, album: Album) -> List[Album]:
-        return []
-
-    def song_search(self, song: Song) -> List[Song]:
-        return []
-
-    # to fetch stuff
-    def fetch_song(self, source: Source, stop_at_level: int = 1) -> Song:
-        return Song()
-
-    def fetch_album(self, source: Source, stop_at_level: int = 1) -> Album:
-        return Album()
-
-    def fetch_artist(self, source: Source, stop_at_level: int = 1) -> Artist:
-        return Artist()
-
-    def fetch_label(self, source: Source, stop_at_level: int = 1) -> Label:
-        return Label()
-
-    # to download stuff
-    def get_skip_intervals(self, song: Song, source: Source) -> List[Tuple[float, float]]:
-        return []
-
-    def post_process_hook(self, song: Song, temp_target: Target, **kwargs):
-        pass
-
-    def download_song_to_target(self, source: Source, target: Target, desc: str = None) -> DownloadResult:
-        return DownloadResult()
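Concrete pages override the hooks of this base class. A rough, illustrative skeleton of what a subclass might look like after this commit, assuming the import paths shown elsewhere in the diff remain valid; ExamplePage, its SOURCE_TYPE choice, and the method bodies are invented stubs rather than code from the repository:

from ..download import Page                       # Page now lives in the download package
from ..objects import Song, Source, Target
from ..utils.enums import ALL_SOURCE_TYPES
from ..utils.support_classes.download_result import DownloadResult


class ExamplePage(Page):
    # leaf classes with REGISTER = True are what scan_for_pages() instantiates
    REGISTER = True
    SOURCE_TYPE = ALL_SOURCE_TYPES.MANUAL         # placeholder; a real page uses its own source type

    def get_source_type(self, source: Source):
        # tells Pages.fetch_from_source() which fetch_* method to dispatch to
        return Song

    def fetch_song(self, source: Source, stop_at_level: int = 1) -> Song:
        # parse the remote resource behind `source` and return a populated Song
        return Song()

    def download_song_to_target(self, source: Source, target: Target, desc: str = None) -> DownloadResult:
        # stream the audio into `target`; an empty DownloadResult signals nothing was downloaded
        return DownloadResult()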
@@ -1,33 +1,22 @@
-from typing import List, Optional, Type
-from urllib.parse import urlparse, urlunparse
 import json
 from enum import Enum
-from bs4 import BeautifulSoup
-import pycountry
+from typing import List, Optional, Type
+from urllib.parse import urlparse, urlunparse
 
+import pycountry
+from bs4 import BeautifulSoup
 
-from ..objects import Source, DatabaseObject
-from ._abstract import Page
-from ..objects import (
-    Artist,
-    Source,
-    SourceType,
-    Song,
-    Album,
-    Label,
-    Target,
-    Contact,
-    ID3Timestamp,
-    Lyrics,
-    FormattedText,
-    Artwork,
-)
 from ..connection import Connection
+from ..download import Page
+from ..objects import (Album, Artist, Artwork, Contact, DatabaseObject,
+                       FormattedText, ID3Timestamp, Label, Lyrics, Song,
+                       Source, SourceType, Target)
 from ..utils import dump_to_file
-from ..utils.enums import SourceType, ALL_SOURCE_TYPES
-from ..utils.support_classes.download_result import DownloadResult
-from ..utils.string_processing import clean_song_title
-from ..utils.config import main_settings, logging_settings
+from ..utils.config import logging_settings, main_settings
+from ..utils.enums import ALL_SOURCE_TYPES, SourceType
 from ..utils.shared import DEBUG
+from ..utils.string_processing import clean_song_title
+from ..utils.support_classes.download_result import DownloadResult
 
 if DEBUG:
     from ..utils import dump_to_file
@@ -1,31 +1,20 @@
 from collections import defaultdict
-from typing import List, Optional, Dict, Type, Union
-from bs4 import BeautifulSoup
+from typing import Dict, List, Optional, Type, Union
+from urllib.parse import urlencode, urlparse
 
 import pycountry
-from urllib.parse import urlparse, urlencode
+from bs4 import BeautifulSoup
 
 from ..connection import Connection
-from ..utils.config import logging_settings
-from ._abstract import Page
-from ..utils.enums import SourceType, ALL_SOURCE_TYPES
-from ..utils.enums.album import AlbumType
-from ..utils.support_classes.query import Query
-from ..objects import (
-    Lyrics,
-    Artist,
-    Source,
-    Song,
-    Album,
-    ID3Timestamp,
-    FormattedText,
-    Label,
-    Options,
-    DatabaseObject
-)
-from ..utils.shared import DEBUG
+from ..download import Page
+from ..objects import (Album, Artist, DatabaseObject, FormattedText,
+                       ID3Timestamp, Label, Lyrics, Options, Song, Source)
 from ..utils import dump_to_file
+from ..utils.config import logging_settings
+from ..utils.enums import ALL_SOURCE_TYPES, SourceType
+from ..utils.enums.album import AlbumType
+from ..utils.shared import DEBUG
+from ..utils.support_classes.query import Query
 
 ALBUM_TYPE_MAP: Dict[str, AlbumType] = defaultdict(lambda: AlbumType.OTHER, {
     "Full-length": AlbumType.STUDIO_ALBUM,
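The ALBUM_TYPE_MAP context lines above rely on a defaultdict with a constant factory, so any release type the mapping does not list resolves to AlbumType.OTHER. A short, self-contained sketch of that fallback (the AlbumType class and its member values here are a minimal stand-in for ..utils.enums.album.AlbumType, and "Some new type" is an invented key):

from collections import defaultdict
from enum import Enum

class AlbumType(Enum):               # minimal stand-in for the project's AlbumType enum
    STUDIO_ALBUM = "Studio Album"
    OTHER = "Other"

ALBUM_TYPE_MAP = defaultdict(lambda: AlbumType.OTHER, {
    "Full-length": AlbumType.STUDIO_ALBUM,
})

print(ALBUM_TYPE_MAP["Full-length"])    # AlbumType.STUDIO_ALBUM (listed explicitly)
print(ALBUM_TYPE_MAP["Some new type"])  # AlbumType.OTHER via the default factory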
@@ -1,33 +1,22 @@
-from typing import List, Optional, Type
-from urllib.parse import urlparse, urlunparse, urlencode
 import json
 from enum import Enum
-from bs4 import BeautifulSoup
-import pycountry
+from typing import List, Optional, Type
+from urllib.parse import urlencode, urlparse, urlunparse
 
+import pycountry
+from bs4 import BeautifulSoup
 
-from ..objects import Source, DatabaseObject
-from ._abstract import Page
-from ..objects import (
-    Artist,
-    Source,
-    SourceType,
-    Song,
-    Album,
-    Label,
-    Target,
-    Contact,
-    ID3Timestamp,
-    Lyrics,
-    FormattedText,
-    Artwork,
-)
 from ..connection import Connection
+from ..download import Page
+from ..objects import (Album, Artist, Artwork, Contact, DatabaseObject,
+                       FormattedText, ID3Timestamp, Label, Lyrics, Song,
+                       Source, SourceType, Target)
 from ..utils import dump_to_file, traverse_json_path
-from ..utils.enums import SourceType, ALL_SOURCE_TYPES
-from ..utils.support_classes.download_result import DownloadResult
-from ..utils.string_processing import clean_song_title
-from ..utils.config import main_settings, logging_settings
+from ..utils.config import logging_settings, main_settings
+from ..utils.enums import ALL_SOURCE_TYPES, SourceType
 from ..utils.shared import DEBUG
+from ..utils.string_processing import clean_song_title
+from ..utils.support_classes.download_result import DownloadResult
 
 if DEBUG:
     from ..utils import dump_to_file
@@ -1,34 +1,23 @@
 from collections import defaultdict
 from dataclasses import dataclass
 from enum import Enum
-from typing import List, Optional, Type, Union, Generator, Dict, Any
+from typing import Any, Dict, Generator, List, Optional, Type, Union
 from urllib.parse import urlparse
 
 import pycountry
 from bs4 import BeautifulSoup
 
 from ..connection import Connection
-from ._abstract import Page
-from ..utils.enums import SourceType, ALL_SOURCE_TYPES
-from ..utils.enums.album import AlbumType, AlbumStatus
-from ..objects import (
-    Artist,
-    Source,
-    Song,
-    Album,
-    ID3Timestamp,
-    FormattedText,
-    Label,
-    Target,
-    DatabaseObject,
-    Lyrics,
-    Artwork
-)
+from ..download import Page
+from ..objects import (Album, Artist, Artwork, DatabaseObject, FormattedText,
+                       ID3Timestamp, Label, Lyrics, Song, Source, Target)
+from ..utils import shared, string_processing
 from ..utils.config import logging_settings, main_settings
-from ..utils import string_processing, shared
+from ..utils.enums import ALL_SOURCE_TYPES, SourceType
+from ..utils.enums.album import AlbumStatus, AlbumType
 from ..utils.string_processing import clean_song_title
-from ..utils.support_classes.query import Query
 from ..utils.support_classes.download_result import DownloadResult
+from ..utils.support_classes.query import Query
 
 """
 https://musify.club/artist/ghost-bath-280348?_pjax=#bodyContent
@@ -1,29 +1,19 @@
-from typing import List, Optional, Type, Tuple
-from urllib.parse import urlparse, urlunparse, parse_qs
 from enum import Enum
+from typing import List, Optional, Tuple, Type
+from urllib.parse import parse_qs, urlparse, urlunparse
 
 import python_sponsorblock
 
-from ..objects import Source, DatabaseObject, Song, Target
-from ._abstract import Page
-from ..objects import (
-    Artist,
-    Source,
-    Song,
-    Album,
-    Label,
-    Target,
-    FormattedText,
-    ID3Timestamp
-)
 from ..connection import Connection
+from ..download import Page
+from ..objects import (Album, Artist, DatabaseObject, FormattedText,
+                       ID3Timestamp, Label, Song, Source, Target)
+from ..utils.config import logging_settings, main_settings, youtube_settings
+from ..utils.enums import ALL_SOURCE_TYPES, SourceType
 from ..utils.string_processing import clean_song_title
-from ..utils.enums import SourceType, ALL_SOURCE_TYPES
 from ..utils.support_classes.download_result import DownloadResult
-from ..utils.config import youtube_settings, main_settings, logging_settings
-from ._youtube_music.super_youtube import SuperYouTube, YouTubeUrl, get_invidious_url, YouTubeUrlType
+from ._youtube_music.super_youtube import (SuperYouTube, YouTubeUrl,
+                                           YouTubeUrlType, get_invidious_url)
 
 
 """
 - https://yt.artemislena.eu/api/v1/search?q=Zombiez+-+Topic&page=1&date=none&type=channel&duration=none&sort=relevance
@@ -1,46 +1,33 @@
-from __future__ import unicode_literals, annotations
+from __future__ import annotations, unicode_literals
 
-from typing import Dict, List, Optional, Set, Type
-from urllib.parse import urlparse, urlunparse, quote, parse_qs, urlencode
+import json
 import logging
 import random
-import json
-from dataclasses import dataclass
 import re
-from functools import lru_cache
 from collections import defaultdict
+from dataclasses import dataclass
+from functools import lru_cache
+from typing import Dict, List, Optional, Set, Type
+from urllib.parse import parse_qs, quote, urlencode, urlparse, urlunparse
 
 import youtube_dl
 from youtube_dl.extractor.youtube import YoutubeIE
 from youtube_dl.utils import DownloadError
 
+from ...connection import Connection
+from ...download import Page
+from ...objects import Album, Artist, Artwork
+from ...objects import DatabaseObject as DataObject
+from ...objects import (FormattedText, ID3Timestamp, Label, Lyrics, Song,
+                        Source, Target)
+from ...utils import dump_to_file, get_current_millis, traverse_json_path
+from ...utils.config import logging_settings, main_settings, youtube_settings
+from ...utils.enums import ALL_SOURCE_TYPES, SourceType
+from ...utils.enums.album import AlbumType
 from ...utils.exception.config import SettingValueError
-from ...utils.config import main_settings, youtube_settings, logging_settings
 from ...utils.shared import DEBUG, DEBUG_YOUTUBE_INITIALIZING
 from ...utils.string_processing import clean_song_title
-from ...utils import get_current_millis, traverse_json_path
-
-from ...utils import dump_to_file
-
-from .._abstract import Page
-from ...objects import (
-    DatabaseObject as DataObject,
-    Source,
-    FormattedText,
-    ID3Timestamp,
-    Artwork,
-    Artist,
-    Song,
-    Album,
-    Label,
-    Target,
-    Lyrics,
-)
-from ...connection import Connection
-from ...utils.enums import SourceType, ALL_SOURCE_TYPES
-from ...utils.enums.album import AlbumType
 from ...utils.support_classes.download_result import DownloadResult
 
 from ._list_render import parse_renderer
 from ._music_object_render import parse_run_element
 from .super_youtube import SuperYouTube