feature/sponsorblock #17

Merged
Hazel merged 20 commits from feature/sponsorblock into experimental 2024-04-27 09:41:36 +00:00
5 changed files with 20 additions and 14 deletions
Showing only changes of commit f52b5e6325

View File

@@ -6,8 +6,8 @@ logging.getLogger().setLevel(logging.DEBUG)
 if __name__ == "__main__":
     commands = [
-        "s: #a Ruffiction",
-        "d: 8",
+        "s: #a Crystal F",
+        "d: 20",
     ]

View File

@@ -17,7 +17,7 @@ class CacheAttribute:
     created: datetime
     expires: datetime
+    additional_info: dict = field(default_factory=dict)
 
     @property
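
Note on the new field above: a bare "additional_info: dict = {}" is rejected by dataclasses as a mutable default, and sharing a single dict across instances is exactly what per-entry metadata must avoid, so field(default_factory=dict) gives every CacheAttribute its own empty dict. A minimal, self-contained illustration (the class name here is made up):

from dataclasses import dataclass, field
from datetime import datetime

@dataclass
class Entry:
    created: datetime
    # "additional_info: dict = {}" would raise ValueError: mutable default not allowed
    additional_info: dict = field(default_factory=dict)

a = Entry(created=datetime.now())
b = Entry(created=datetime.now())
a.additional_info["encoding"] = "utf-8"
assert b.additional_info == {}  # each instance gets its own dict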
@@ -57,13 +57,16 @@ class Cache:
         self._time_fields = {"created", "expires"}
 
         with self.index.open("r") as i:
-            for c in json.loads(i.read()):
-                for key in self._time_fields:
-                    c[key] = datetime.fromisoformat(c[key])
-
-                ca = CacheAttribute(**c)
-                self.cached_attributes.append(ca)
-                self._id_to_attribute[ca.id] = ca
+            try:
+                for c in json.loads(i.read()):
+                    for key in self._time_fields:
+                        c[key] = datetime.fromisoformat(c[key])
+
+                    ca = CacheAttribute(**c)
+                    self.cached_attributes.append(ca)
+                    self._id_to_attribute[ca.id] = ca
+            except json.JSONDecodeError:
+                pass
 
     @lru_cache()
     def _init_module(self, module: str) -> Path:
@@ -129,6 +132,7 @@ class Cache:
             name=name,
             created=datetime.now(),
             expires=datetime.now() + timedelta(days=expires_in),
+            additional_info=additional_info,
         )
 
         self._write_attribute(cache_attribute)
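
For context on the two Cache hunks above: the index file is a JSON list of attribute dicts whose created/expires values are stored as ISO strings and parsed back with datetime.fromisoformat on load, and the new try/except means an empty or corrupt index no longer crashes startup, it just yields an empty cache. A rough sketch of that round-trip under those assumptions (the file name and the module field are made up, the read loop mirrors the hunk above):

import json
from datetime import datetime, timedelta
from pathlib import Path

index = Path("cache_index.json")

# write one entry the way the index stores it: datetimes as ISO strings
index.write_text(json.dumps([{
    "module": "musify",
    "name": "track_123",
    "created": datetime.now().isoformat(),
    "expires": (datetime.now() + timedelta(days=10)).isoformat(),
    "additional_info": {"encoding": "utf-8"},
}]))

# read it back the way Cache.__init__ does after this change
entries = []
try:
    for c in json.loads(index.read_text()):
        for key in ("created", "expires"):
            c[key] = datetime.fromisoformat(c[key])
        entries.append(c)
except json.JSONDecodeError:
    pass  # empty or corrupt index -> start with an empty cache

print(entries)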

View File

@@ -134,7 +134,7 @@ class Connection:
             return
 
         self.cache.set(r.content, name, expires_in=kwargs.get("expires_in", self.cache_expiring_duration), additional_info={
-            "encoding", r.encoding,
+            "encoding": r.encoding,
         }, **n_kwargs)
 
     def request(
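
The one character changed above matters because of Python literal syntax: with a comma the braces build a set, with a colon they build the dict that additional_info expects. Quick check:

encoding = "utf-8"

old = {"encoding", encoding}   # what the previous code built: a two-element set
new = {"encoding": encoding}   # the intended mapping

assert isinstance(old, set) and isinstance(new, dict)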
@@ -191,9 +191,11 @@ class Connection:
             request_trace(f"{trace_string}\t[cached]")
 
             with responses.RequestsMock() as resp:
+                additional_info = cached.attribute.additional_info
+
                 body = cached.content
-                if "encoding" in cached.additional_info:
-                    body = body.decode(cached.additional_info["encoding"])
+                if "encoding" in additional_info:
+                    body = body.decode(additional_info["encoding"])
 
                 resp.add(
                     method=method,
View File

@@ -451,7 +451,7 @@ class Page:
         source = sources[0]
 
         if not found_on_disc:
-            r = self.download_song_to_target(source=source, target=temp_target, desc=song.title)
+            r = self.download_song_to_target(source=source, target=temp_target, desc=song.option_string)
 
         if not r.is_fatal_error:
             r.merge(self._post_process_targets(song, temp_target,

View File

@@ -1128,4 +1128,4 @@ class Musify(Page):
         self.LOGGER.warning(f"The source has no audio link. Falling back to {endpoint}.")
 
-        return self.stream_connection.stream_into(endpoint, target, raw_url=True, exclude_headers=["Host"])
+        return self.stream_connection.stream_into(endpoint, target, raw_url=True, exclude_headers=["Host"], name=desc)