fix: youtube client for single songs

commit e187ffebaf
parent 7f2abdf572
@@ -22,4 +22,5 @@ guppy3~=3.1.3
 toml~=0.10.2
 typing_extensions~=4.7.1
 responses~=0.24.1
-youtube_dl
+youtube_dl
+merge_args~=0.1.5
@@ -44,11 +44,11 @@ if __name__ == "__main__":
         "d: 0"
     ]
 
-    bandcamp_test = [
-        "s: #a Only Smile",
-        "d: 7",
+    commands = [
+        "s: #a PTK",
+        "d: 26",
     ]
 
 
-    music_kraken.cli.download(genre="test", command_list=bandcamp_test, process_metadata_anyway=True)
+    music_kraken.cli.download(genre="test", command_list=commands, process_metadata_anyway=True)
     _ = "debug"
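Reviewer note: the strings above read like scripted input for the interactive shell, where "s: …" seems to issue a search ("#a" presumably scoping it to artists) and "d: <n>" downloads result n; that interpretation is an assumption. Replaying the new single-song test case uses only names that already appear in the diff:

    import music_kraken

    # assumed semantics: "s:" = search, "#a" = artist scope, "d:" = download by result index
    commands = [
        "s: #a PTK",
        "d: 26",
    ]

    music_kraken.cli.download(genre="test", command_list=commands, process_metadata_anyway=True)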
@@ -1,54 +1,52 @@
 import logging
 import gc
 import sys
 from pathlib import Path
 
+from rich.logging import RichHandler
+from rich.console import Console
+
 from .utils.shared import DEBUG, DEBUG_LOGGING
 from .utils.config import logging_settings, main_settings, read_config
 
 read_config()
 
+console: Console = Console(width=220)
+def init_logging():
+    log_file = main_settings['log_file']
+
+    if log_file.is_file():
+        last_log_file = Path(log_file.parent, "prev." + log_file.name)
+
+        with log_file.open("r", encoding="utf-8") as current_file:
+            with last_log_file.open("w", encoding="utf-8") as last_file:
+                last_file.write(current_file.read())
+
+    rich_handler = RichHandler(rich_tracebacks=True, console=console)
+    rich_handler.setLevel(logging_settings['log_level'] if not DEBUG_LOGGING else logging.DEBUG)
+
+    file_handler = logging.FileHandler(log_file)
+    file_handler.setLevel(logging.DEBUG)
+
+    # configure logger default
+    logging.basicConfig(
+        level=logging.DEBUG,
+        format=logging_settings['logging_format'],
+        datefmt="%Y-%m-%d %H:%M:%S",
+        handlers=[
+            file_handler,
+            rich_handler,
+        ]
+    )
+
+init_logging()
+
 from . import cli
 
 if DEBUG:
     import sys
 
     sys.setrecursionlimit(100)
 
 
-class CustomFormatter(logging.Formatter):
-    grey = "\x1b[38;20m"
-    yellow = "\x1b[33;20m"
-    red = "\x1b[31;20m"
-    bold_red = "\x1b[31;1m"
-    reset = "\x1b[0m"
-    format = logging_settings['logging_format']
-
-    FORMATS = {
-        logging.DEBUG: grey + format + reset,
-        logging.INFO: grey + format + reset,
-        logging.WARNING: yellow + format + reset,
-        logging.ERROR: red + format + reset,
-        logging.CRITICAL: bold_red + format + reset
-    }
-
-    def format(self, record):
-        log_fmt = self.FORMATS.get(record.levelno)
-        formatter = logging.Formatter(log_fmt)
-        return formatter.format(record)
-
-
-stream_handler = logging.StreamHandler()
-stream_handler.setFormatter(CustomFormatter())
-
-# configure logger default
-logging.basicConfig(
-    level=logging_settings['log_level'] if not DEBUG_LOGGING else logging.DEBUG,
-    format=logging_settings['logging_format'],
-    handlers=[
-        logging.FileHandler(main_settings['log_file']),
-        stream_handler
-    ]
-)
-
 if main_settings['modify_gc']:
     """
     At the start I modify the garbage collector to run a bit fewer times.
@@ -63,3 +61,5 @@ if main_settings['modify_gc']:
     gen1 = gen1 * 2
     gen2 = gen2 * 2
     gc.set_threshold(allocs, gen1, gen2)
+
+
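Reviewer note on the logging change above: the new init_logging() copies the previous run's log to "prev.<name>" and then registers a RichHandler plus a FileHandler on the root logger. A minimal standalone sketch of the same pattern; the file name, level and format below are placeholders, not music_kraken's real settings:

    # Minimal sketch of the log setup pattern used above; "app.log" and the
    # level/format values are placeholders, not the project's config.
    import logging
    from pathlib import Path

    from rich.console import Console
    from rich.logging import RichHandler

    console = Console(width=220)

    def init_logging(log_file: Path = Path("app.log")) -> None:
        # keep a copy of the previous run's log as "prev.<name>"
        if log_file.is_file():
            prev = Path(log_file.parent, "prev." + log_file.name)
            prev.write_text(log_file.read_text(encoding="utf-8"), encoding="utf-8")

        rich_handler = RichHandler(rich_tracebacks=True, console=console)
        rich_handler.setLevel(logging.INFO)          # placeholder level

        file_handler = logging.FileHandler(log_file)
        file_handler.setLevel(logging.DEBUG)

        logging.basicConfig(
            level=logging.DEBUG,
            format="%(levelname)s - %(message)s",    # placeholder format
            datefmt="%Y-%m-%d %H:%M:%S",
            handlers=[file_handler, rich_handler],
        )

    init_logging()
    logging.getLogger(__name__).info("logging initialised")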
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import logging
 import threading
 import time
@@ -9,13 +11,13 @@ import inspect
 import requests
 import responses
 from tqdm import tqdm
+import merge_args
 
 from .cache import Cache
 from .rotating import RotatingProxy
 from ..objects import Target
 from ..utils.config import main_settings
 from ..utils.support_classes.download_result import DownloadResult
-from ..utils.hacking import merge_args
 
 
 class Connection:
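Reviewer note on the import swap: @merge_args(request), used further down, copies request()'s signature onto thin wrappers such as get() so callers see the full parameter list. A rough self-contained stand-in for that behaviour, not the PyPI package and not the project's utils.hacking implementation:

    # Illustration of what a merge_args-style decorator provides: the wrapper
    # keeps its own body but advertises the wrapped function's signature, so
    # help() and IDEs show request()'s parameters on get().
    import functools
    import inspect

    def merge_args(source):
        def decorator(dest):
            @functools.wraps(dest)
            def wrapper(*args, **kwargs):
                return dest(*args, **kwargs)
            wrapper.__signature__ = inspect.signature(source)
            return wrapper
        return decorator

    def request(url: str, method: str = "GET", timeout: float = 10.0):
        return f"{method} {url} (timeout={timeout})"

    @merge_args(request)
    def get(*args, **kwargs):
        return request(*args, **kwargs)

    print(inspect.signature(get))      # shows request()'s parameters
    print(get("https://example.com"))  # GET https://example.com (timeout=10.0)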
@@ -144,6 +146,7 @@ class Connection:
         if method is None:
             raise AttributeError("method is not set.")
         method = method.upper()
         headers = dict() if headers is None else headers
+        disable_cache = headers.get("Cache-Control", "").lower() == "no-cache" if disable_cache is None else disable_cache
         accepted_response_codes = self.ACCEPTED_RESPONSE_CODES if accepted_response_codes is None else accepted_response_codes
 
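Reviewer note: in this hunk, disable_cache defaults to whether the caller sent a "Cache-Control: no-cache" request header when no explicit value is given. The same decision in isolation (the helper name is illustrative):

    # Sketch of the default-resolution logic from the hunk above, in isolation.
    from typing import Optional

    def resolve_disable_cache(headers: Optional[dict], disable_cache: Optional[bool]) -> bool:
        headers = {} if headers is None else headers
        if disable_cache is None:
            # an explicit "Cache-Control: no-cache" request header opts out of the cache
            return headers.get("Cache-Control", "").lower() == "no-cache"
        return disable_cache

    assert resolve_disable_cache({"Cache-Control": "no-cache"}, None) is True
    assert resolve_disable_cache(None, None) is False
    assert resolve_disable_cache({"Cache-Control": "no-cache"}, False) is False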
@@ -211,8 +214,7 @@ class Connection:
             self.lock = False
 
             if not connection_failed:
-                self.LOGGER.warning(f"{self.HOST.netloc} responded wit {r.status_code} "
-                                    f"at {url}. ({try_count}-{self.TRIES})")
+                self.LOGGER.warning(f"{self.HOST.netloc} responded wit {r.status_code} at {url}. ({try_count}-{self.TRIES})")
                 if r is not None:
                     self.LOGGER.debug("request headers:\n\t"+ "\n\t".join(f"{k}\t=\t{v}" for k, v in r.request.headers.items()))
                     self.LOGGER.debug("response headers:\n\t"+ "\n\t".join(f"{k}\t=\t{v}" for k, v in r.headers.items()))
@@ -228,7 +230,7 @@ class Connection:
                 self.rotate()
 
             current_kwargs["try_count"] = current_kwargs.get("try_count", 0) + 1
-            return self.request(**current_kwargs)
+            return Connection.request(**current_kwargs)
 
     @merge_args(request)
     def get(self, *args, **kwargs) -> Optional[requests.Response]:
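Reviewer note: the retry path re-enters request() by unpacking current_kwargs. If current_kwargs is captured from locals() it already contains self, which would make the unbound Connection.request(**current_kwargs) form pass self exactly once, while self.request(**current_kwargs) would pass it twice. A minimal sketch of that pattern with illustrative names:

    # Minimal sketch of the locals()-based retry pattern, assuming
    # current_kwargs is built from locals() and therefore already carries
    # `self`. Class and method names here are illustrative only.
    from typing import Optional

    class Client:
        TRIES = 3

        def request(self, url: str, try_count: int = 0) -> Optional[str]:
            current_kwargs = locals().copy()

            if try_count < self.TRIES - 1 and url.startswith("flaky"):
                current_kwargs["try_count"] = current_kwargs.get("try_count", 0) + 1
                # unbound call: `self` is already inside current_kwargs, so
                # Client.request(**current_kwargs) passes it exactly once
                return Client.request(**current_kwargs)

            return f"stopped after {try_count} retries for {url}"

    print(Client().request("flaky://example"))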
@@ -275,8 +277,8 @@ class Connection:
         r = self.request(
             url=url,
             name=name,
             chunk_size=chunk_size,
             method=method,
             stream=True,
             **kwargs
         )
 
@@ -304,7 +304,7 @@ class Collection(Generic[T]):
 
     @property
     def empty(self) -> bool:
-        return self.__len__() == 0
+        return self.__len__() <= 0
 
     def __iter__(self) -> Iterator[T]:
         for element in self._data:
@@ -321,12 +321,11 @@ class Collection(Generic[T]):
         if item < len(self._data):
             return self._data[item]
 
-        item = item - (len(self._data) - 1)
+        item = item - len(self._data)
 
         for c in self.children:
             if item < len(c):
                 return c[item]
 
-            item = item - (len(self._data) - 1)
-            return c.__getitem__(item)
+            item = item - len(c._data)
 
         raise IndexError
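Reviewer note on the index arithmetic: once an index falls past the local _data, the corrected code subtracts the full length of each skipped segment (len(self._data), then each child's length) rather than length - 1, and no longer returns from the first child unconditionally. A standalone sketch of the corrected walk (illustrative class, not the project's Collection):

    # Standalone sketch of indexing across a parent collection and its
    # children, using the corrected "subtract the full length" arithmetic
    # from the hunk above. This mirrors the idea, not the project's class.
    from typing import Generic, List, TypeVar

    T = TypeVar("T")

    class NestedCollection(Generic[T]):
        def __init__(self, data: List[T], children: List["NestedCollection[T]"] = None):
            self._data = data
            self.children = children or []

        def __len__(self) -> int:
            return len(self._data) + sum(len(c) for c in self.children)

        def __getitem__(self, item: int) -> T:
            if item < len(self._data):
                return self._data[item]

            item = item - len(self._data)          # not len(self._data) - 1

            for c in self.children:
                if item < len(c):
                    return c[item]
                item = item - len(c)               # likewise, the child's full length

            raise IndexError

    coll = NestedCollection([1, 2], children=[NestedCollection([3]), NestedCollection([4, 5])])
    assert [coll[i] for i in range(len(coll))] == [1, 2, 3, 4, 5]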
@@ -42,6 +42,7 @@ class Song(Base):
     length: int
     genre: str
     note: FormattedText
+    tracksort: int
 
     source_collection: SourceCollection
     target_collection: Collection[Target]
@@ -65,18 +66,17 @@ class Song(Base):
         "unified_title": lambda: None,
         "isrc": lambda: None,
         "genre": lambda: None,
+
+        "tracksort": lambda: 0,
     }
 
     def __init__(self, title: str = "", unified_title: str = None, isrc: str = None, length: int = None,
                  genre: str = None, note: FormattedText = None, source_list: List[Source] = None,
                  target_list: List[Target] = None, lyrics_list: List[Lyrics] = None,
                  main_artist_list: List[Artist] = None, feature_artist_list: List[Artist] = None,
-                 album_list: List[Album] = None, **kwargs) -> None:
+                 album_list: List[Album] = None, tracksort: int = 0, **kwargs) -> None:
 
-        super().__init__(title=title, unified_title=unified_title, isrc=isrc, length=length, genre=genre, note=note,
-                         source_list=source_list, target_list=target_list, lyrics_list=lyrics_list,
-                         main_artist_list=main_artist_list, feature_artist_list=feature_artist_list,
-                         album_list=album_list, **kwargs)
+        Base.__init__(**locals())
 
     UPWARDS_COLLECTION_STRING_ATTRIBUTES = ("album_collection", "main_artist_collection", "feature_artist_collection")
 
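Reviewer note on the constructor change: Base.__init__(**locals()) forwards every named parameter (plus kwargs) in one call instead of re-listing them. A hedged sketch of that pattern with a toy hierarchy; the real Base presumably does more than setattr:

    # Toy illustration of the Base.__init__(**locals()) forwarding pattern:
    # at the top of __init__, locals() is exactly the bound parameters, so a
    # base class can consume them without repeating every argument name.
    class Base:
        def __init__(self, **values) -> None:
            kwargs = values.pop("kwargs", {})
            for key, value in {**values, **kwargs}.items():
                setattr(self, key, value)

    class Song(Base):
        def __init__(self, title: str = "", tracksort: int = 0, **kwargs) -> None:
            Base.__init__(**locals())

    song = Song(title="Only Smile", tracksort=7, genre="test")
    assert (song.title, song.tracksort, song.genre) == ("Only Smile", 7, "test")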
@@ -321,7 +321,7 @@ class Page:
 
         if collection.empty:
             continue
-
+
         dom_ordered_music_object: DatabaseObject = collection[0]
         naming_dict.add_object(dom_ordered_music_object)
         return fill_naming_objects(dom_ordered_music_object)
@@ -187,7 +187,7 @@ class YoutubeMusic(SuperYouTube):
         SuperYouTube.__init__(self, *args, **kwargs)
 
         self.download_connection: Connection = Connection(
-            host="https://music.youtube.com/",
+            host="https://rr2---sn-cxaf0x-nugl.googlevideo.com/",
             logger=self.LOGGER,
             sleep_after_404=youtube_settings["sleep_after_youtube_403"]
         )
@@ -514,9 +514,7 @@ class YoutubeMusic(SuperYouTube):
             self.LOGGER.warning(f"Couldn't fetch the audio source with the innertube api, falling back to invidious.")
             return super().download_song_to_target(source, target)
 
-        return self.download_connection.stream_into(source.audio_url, target, description=desc, headers={
-            "Host": "rr1---sn-cxaf0x-nugl.googlevideo.com"
-        }, raw_url=True, disable_cache=True)
+        return self.download_connection.stream_into(source.audio_url, target, name=desc, raw_url=True, disable_cache=True)
 
     def __del__(self):
         self.ydl.__exit__()
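Reviewer note: with the googlevideo host set directly on download_connection, the hand-built "Host" header and per-call headers dict become unnecessary, and the stream goes through the normal connection path with disable_cache=True. As a rough stand-in for Connection.stream_into(), streaming a URL to a file with plain requests looks roughly like this (function name and chunk size are illustrative):

    # Hedged sketch: streaming an audio URL straight to a file with plain
    # requests, standing in for Connection.stream_into(). The real code goes
    # through the project's Connection class with raw_url=True and
    # disable_cache=True.
    from pathlib import Path

    import requests

    def stream_into(url: str, target: Path, chunk_size: int = 1024 * 256) -> None:
        with requests.get(url, stream=True, timeout=30) as r:
            r.raise_for_status()
            with target.open("wb") as f:
                for chunk in r.iter_content(chunk_size=chunk_size):
                    f.write(chunk)

    # stream_into(source.audio_url, Path("song.m4a"))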