fix: youtube client for single songs

Hazel 2024-02-28 14:27:35 +01:00
parent 7f2abdf572
commit e187ffebaf
8 changed files with 62 additions and 62 deletions

View File

@@ -23,3 +23,4 @@ toml~=0.10.2
 typing_extensions~=4.7.1
 responses~=0.24.1
 youtube_dl
+merge_args~=0.1.5

View File

@@ -44,11 +44,11 @@ if __name__ == "__main__":
         "d: 0"
     ]
 
-    bandcamp_test = [
-        "s: #a Only Smile",
-        "d: 7",
-    ]
+    commands = [
+        "s: #a PTK",
+        "d: 26",
+    ]
 
-    music_kraken.cli.download(genre="test", command_list=bandcamp_test, process_metadata_anyway=True)
+    music_kraken.cli.download(genre="test", command_list=commands, process_metadata_anyway=True)
     _ = "debug"

View File

@@ -1,54 +1,52 @@
 import logging
 import gc
 import sys
+from pathlib import Path
+
+from rich.logging import RichHandler
+from rich.console import Console
 
 from .utils.shared import DEBUG, DEBUG_LOGGING
 from .utils.config import logging_settings, main_settings, read_config
 
 read_config()
+
+console: Console = Console(width=220)
+
+
+def init_logging():
+    log_file = main_settings['log_file']
+
+    if log_file.is_file():
+        last_log_file = Path(log_file.parent, "prev." + log_file.name)
+
+        with log_file.open("r", encoding="utf-8") as current_file:
+            with last_log_file.open("w", encoding="utf-8") as last_file:
+                last_file.write(current_file.read())
+
+    rich_handler = RichHandler(rich_tracebacks=True, console=console)
+    rich_handler.setLevel(logging_settings['log_level'] if not DEBUG_LOGGING else logging.DEBUG)
+
+    file_handler = logging.FileHandler(log_file)
+    file_handler.setLevel(logging.DEBUG)
+
+    # configure logger default
+    logging.basicConfig(
+        level=logging.DEBUG,
+        format=logging_settings['logging_format'],
+        datefmt="%Y-%m-%d %H:%M:%S",
+        handlers=[
+            file_handler,
+            rich_handler,
+        ]
+    )
+
+
+init_logging()
+
 from . import cli
 
 if DEBUG:
-    import sys
     sys.setrecursionlimit(100)
 
-
-class CustomFormatter(logging.Formatter):
-    grey = "\x1b[38;20m"
-    yellow = "\x1b[33;20m"
-    red = "\x1b[31;20m"
-    bold_red = "\x1b[31;1m"
-    reset = "\x1b[0m"
-    format = logging_settings['logging_format']
-
-    FORMATS = {
-        logging.DEBUG: grey + format + reset,
-        logging.INFO: grey + format + reset,
-        logging.WARNING: yellow + format + reset,
-        logging.ERROR: red + format + reset,
-        logging.CRITICAL: bold_red + format + reset
-    }
-
-    def format(self, record):
-        log_fmt = self.FORMATS.get(record.levelno)
-        formatter = logging.Formatter(log_fmt)
-        return formatter.format(record)
-
-
-stream_handler = logging.StreamHandler()
-stream_handler.setFormatter(CustomFormatter())
-
-# configure logger default
-logging.basicConfig(
-    level=logging_settings['log_level'] if not DEBUG_LOGGING else logging.DEBUG,
-    format=logging_settings['logging_format'],
-    handlers=[
-        logging.FileHandler(main_settings['log_file']),
-        stream_handler
-    ]
-)
-
 if main_settings['modify_gc']:
     """
     At the start I modify the garbage collector to run a bit fewer times.
@@ -63,3 +61,5 @@ if main_settings['modify_gc']:
     gen1 = gen1 * 2
     gen2 = gen2 * 2
     gc.set_threshold(allocs, gen1, gen2)
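
The gc block only shows its last lines in the diff; the idea it describes is to read the collector's current thresholds and double the generation-1 and generation-2 limits so full collections run less often. A minimal sketch of that idea using only the standard library (the variable names are assumptions, not necessarily the project's):

    import gc

    # read the current thresholds: (allocations, generation-1, generation-2)
    allocs, gen1, gen2 = gc.get_threshold()

    # double the higher-generation limits so full collections trigger less often
    gc.set_threshold(allocs, gen1 * 2, gen2 * 2)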

View File

@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import logging
 import threading
 import time
@@ -9,13 +11,13 @@ import inspect
 import requests
 import responses
 from tqdm import tqdm
+import merge_args
 
 from .cache import Cache
 from .rotating import RotatingProxy
 from ..objects import Target
 from ..utils.config import main_settings
 from ..utils.support_classes.download_result import DownloadResult
-from ..utils.hacking import merge_args
 
 
 class Connection:
@@ -144,6 +146,7 @@ class Connection:
         if method is None:
             raise AttributeError("method is not set.")
         method = method.upper()
+        headers = dict() if headers is None else headers
 
         disable_cache = headers.get("Cache-Control", "").lower() == "no-cache" if disable_cache is None else disable_cache
         accepted_response_codes = self.ACCEPTED_RESPONSE_CODES if accepted_response_codes is None else accepted_response_codes
@@ -211,8 +214,7 @@ class Connection:
             self.lock = False
 
         if not connection_failed:
-            self.LOGGER.warning(f"{self.HOST.netloc} responded wit {r.status_code} "
-                                f"at {url}. ({try_count}-{self.TRIES})")
+            self.LOGGER.warning(f"{self.HOST.netloc} responded wit {r.status_code} at {url}. ({try_count}-{self.TRIES})")
 
             if r is not None:
                 self.LOGGER.debug("request headers:\n\t"+ "\n\t".join(f"{k}\t=\t{v}" for k, v in r.request.headers.items()))
                 self.LOGGER.debug("response headers:\n\t"+ "\n\t".join(f"{k}\t=\t{v}" for k, v in r.headers.items()))
@@ -228,7 +230,7 @@ class Connection:
             self.rotate()
 
             current_kwargs["try_count"] = current_kwargs.get("try_count", 0) + 1
-            return self.request(**current_kwargs)
+            return Connection.request(**current_kwargs)
 
     @merge_args(request)
     def get(self, *args, **kwargs) -> Optional[requests.Response]:
@@ -275,8 +277,8 @@ class Connection:
         r = self.request(
             url=url,
             name=name,
-            chunk_size=chunk_size,
             method=method,
+            stream=True,
             **kwargs
         )
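
The @merge_args(request) decorator (now pulled in from the merge_args package instead of utils.hacking) lets thin wrappers such as get() advertise request()'s full parameter list. A simplified sketch of that signature-merging idea follows; it is not the package's actual implementation, and merge_signature plus the example functions are made up for illustration:

    import functools
    import inspect

    def merge_signature(source):
        # hypothetical stand-in: expose the wrapped function's signature on a thin wrapper
        def decorator(wrapper):
            @functools.wraps(wrapper)
            def inner(*args, **kwargs):
                return wrapper(*args, **kwargs)
            # tools like help() and IDEs now see source's parameters on the wrapper
            inner.__signature__ = inspect.signature(source)
            return inner
        return decorator

    def request(url: str, method: str = "GET", timeout: float = 10.0) -> str:
        return f"{method} {url} (timeout={timeout})"

    @merge_signature(request)
    def get(url: str, **kwargs) -> str:
        return request(url, method="GET", **kwargs)

    print(inspect.signature(get))  # (url: str, method: str = 'GET', timeout: float = 10.0) -> str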

View File

@@ -304,7 +304,7 @@ class Collection(Generic[T]):
     @property
     def empty(self) -> bool:
-        return self.__len__() == 0
+        return self.__len__() <= 0
 
     def __iter__(self) -> Iterator[T]:
         for element in self._data:
@@ -321,12 +321,11 @@ class Collection(Generic[T]):
         if item < len(self._data):
             return self._data[item]
 
-        item = item - (len(self._data) - 1)
+        item = item - len(self._data)
 
         for c in self.children:
             if item < len(c):
-                return c[item]
-
-            item = item - (len(self._data) - 1)
+                return c.__getitem__(item)
+            item = item - len(c._data)
 
         raise IndexError
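
The __getitem__ fix changes how an index falls through from the collection's own _data into its children: each level subtracts the elements it already covers instead of the previous off-by-one "(len(self._data) - 1)" offset. A self-contained sketch of that chained indexing, using a hypothetical stand-in class rather than the project's Collection:

    from typing import Generic, List, TypeVar

    T = TypeVar("T")

    class NestedList(Generic[T]):
        # hypothetical stand-in: a flat list plus child collections, indexable as one sequence
        def __init__(self, data: List[T] = None, children: List["NestedList[T]"] = None):
            self._data: List[T] = data or []
            self.children: List["NestedList[T]"] = children or []

        def __len__(self) -> int:
            return len(self._data) + sum(len(c) for c in self.children)

        def __getitem__(self, item: int) -> T:
            if item < len(self._data):
                return self._data[item]

            # skip past the elements owned by this level
            item = item - len(self._data)

            for c in self.children:
                if item < len(c):
                    return c[item]
                item = item - len(c)

            raise IndexError

    parent = NestedList([1, 2], children=[NestedList([3, 4]), NestedList([5])])
    print([parent[i] for i in range(len(parent))])  # [1, 2, 3, 4, 5]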

View File

@@ -42,6 +42,7 @@ class Song(Base):
     length: int
     genre: str
     note: FormattedText
+    tracksort: int
 
     source_collection: SourceCollection
     target_collection: Collection[Target]
@@ -65,18 +66,17 @@ class Song(Base):
         "unified_title": lambda: None,
         "isrc": lambda: None,
         "genre": lambda: None,
-        "tracksort": lambda: 0,
     }
 
     def __init__(self, title: str = "", unified_title: str = None, isrc: str = None, length: int = None,
                  genre: str = None, note: FormattedText = None, source_list: List[Source] = None,
                  target_list: List[Target] = None, lyrics_list: List[Lyrics] = None,
                  main_artist_list: List[Artist] = None, feature_artist_list: List[Artist] = None,
-                 album_list: List[Album] = None, **kwargs) -> None:
-        super().__init__(title=title, unified_title=unified_title, isrc=isrc, length=length, genre=genre, note=note,
-                         source_list=source_list, target_list=target_list, lyrics_list=lyrics_list,
-                         main_artist_list=main_artist_list, feature_artist_list=feature_artist_list,
-                         album_list=album_list, **kwargs)
+                 album_list: List[Album] = None, tracksort: int = 0, **kwargs) -> None:
+        Base.__init__(**locals())
 
     UPWARDS_COLLECTION_STRING_ATTRIBUTES = ("album_collection", "main_artist_collection", "feature_artist_collection")
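
The new constructor forwards everything to the parent with Base.__init__(**locals()) instead of repeating every keyword. A minimal sketch of how that pattern can work, with a hypothetical, simplified Base (the project's real Base and its default factories are more involved):

    class Base:
        def __init__(self, **object_attributes):
            # the caller's "self" binds to this method's own self parameter;
            # the nested "kwargs" dict from the caller is merged in here
            object_attributes.update(object_attributes.pop("kwargs", {}))
            for name, value in object_attributes.items():
                setattr(self, name, value)

    class Song(Base):
        def __init__(self, title: str = "", tracksort: int = 0, **kwargs) -> None:
            # locals() is {"self": ..., "title": ..., "tracksort": ..., "kwargs": {...}},
            # so every parameter is forwarded without listing it again by hand
            Base.__init__(**locals())

    song = Song(title="Some Song", tracksort=3, genre="test")
    print(song.title, song.tracksort, song.genre)  # Some Song 3 test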

View File

@@ -187,7 +187,7 @@ class YoutubeMusic(SuperYouTube):
         SuperYouTube.__init__(self, *args, **kwargs)
 
         self.download_connection: Connection = Connection(
-            host="https://music.youtube.com/",
+            host="https://rr2---sn-cxaf0x-nugl.googlevideo.com/",
             logger=self.LOGGER,
             sleep_after_404=youtube_settings["sleep_after_youtube_403"]
         )
@@ -514,9 +514,7 @@ class YoutubeMusic(SuperYouTube):
             self.LOGGER.warning(f"Couldn't fetch the audio source with the innertube api, falling back to invidious.")
             return super().download_song_to_target(source, target)
 
-        return self.download_connection.stream_into(source.audio_url, target, description=desc, headers={
-            "Host": "rr1---sn-cxaf0x-nugl.googlevideo.com"
-        }, raw_url=True, disable_cache=True)
+        return self.download_connection.stream_into(source.audio_url, target, name=desc, raw_url=True, disable_cache=True)
 
     def __del__(self):
         self.ydl.__exit__()
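
The download now points the Connection directly at the googlevideo host and calls stream_into without the manual Host header. The underlying pattern is a chunked, streaming HTTP download; a rough sketch with plain requests (function name, chunk size and timeout are assumptions, not the project's Connection.stream_into):

    import requests

    def stream_into(url: str, path: str, chunk_size: int = 256 * 1024) -> None:
        # stream the response body in chunks instead of loading it all into memory
        with requests.get(url, stream=True, timeout=30) as response:
            response.raise_for_status()
            with open(path, "wb") as file:
                for chunk in response.iter_content(chunk_size=chunk_size):
                    file.write(chunk)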