fixed crash on connection exception

Hellow2 2023-04-03 12:14:58 +02:00
parent 041612e083
commit 77eef63fac
3 changed files with 34 additions and 17 deletions

View File

@@ -63,16 +63,24 @@ class Target(DatabaseObject):
             with open(copy_to.file_path, "wb") as write_to:
                 write_to.write(read_from.read())
 
-    def stream_into(self, r: requests.Response):
+    def stream_into(self, r: requests.Response) -> bool:
+        if r is None:
+            return False
+
         self.create_path()
 
         chunk_size = 1024
         total_size = int(r.headers.get('content-length'))
         initial_pos = 0
 
         with open(self.file_path,'wb') as f:
-            for chunk in r.iter_content(chunk_size=chunk_size):
-                size = f.write(chunk)
+            try:
+                for chunk in r.iter_content(chunk_size=chunk_size):
+                    size = f.write(chunk)
+            except requests.exceptions.Timeout:
+                shared.DOWNLOAD_LOGGER.error("Stream timed out.")
+                return False
 
         """
         # doesn't work yet due to
@@ -85,3 +93,5 @@ class Target(DatabaseObject):
             size = f.write(chunk)
             pbar.update(size)
         """
+
+        return True
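For context, a minimal self-contained sketch of the pattern this hunk introduces: streaming a response to disk while turning a missing response or a mid-stream timeout into a `False` return instead of an uncaught crash. The function name and parameters here are illustrative, not taken from the repository.

```python
import requests

def stream_to_file(r: requests.Response, file_path: str, chunk_size: int = 1024) -> bool:
    # A missing response (e.g. every retry upstream failed) is a failure, not a crash.
    if r is None:
        return False

    with open(file_path, "wb") as f:
        try:
            for chunk in r.iter_content(chunk_size=chunk_size):
                f.write(chunk)
        except requests.exceptions.Timeout:
            # A read timeout can fire mid-stream; report failure to the caller.
            return False

    return True
```

Catching only `requests.exceptions.Timeout` mirrors the commit; the broader `requests.exceptions.RequestException` would also cover connection resets mid-stream, at the cost of masking other request errors.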

View File

@@ -60,6 +60,7 @@ class Page:
     API_SESSION.proxies = shared.proxies
 
     TIMEOUT = 5
     TRIES = 5
+    LOGGER = LOGGER
 
     SOURCE_TYPE: SourcePages
@@ -78,11 +79,11 @@ class Page:
             return r
 
         if not retry:
-            LOGGER.warning(f"{cls.__name__} responded wit {r.status_code} at GET:{url}. ({trie}-{cls.TRIES})")
-            LOGGER.debug(r.content)
+            cls.LOGGER.warning(f"{cls.__name__} responded with {r.status_code} at GET:{url}. ({trie}-{cls.TRIES})")
+            cls.LOGGER.debug(r.content)
 
         if trie >= cls.TRIES:
-            LOGGER.warning("to many tries. Aborting.")
+            cls.LOGGER.warning("Too many tries. Aborting.")
             return None
 
         return cls.get_request(url=url, stream=stream, accepted_response_codes=accepted_response_codes, trie=trie + 1)
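The retry scheme this hunk touches recurses with `trie + 1` until `cls.TRIES` is exhausted, then returns `None`. Below is a minimal iterative sketch of the same bounded-retry GET; the function name, logger name, and loop form are assumptions for illustration, not the repository's API.

```python
import logging
from typing import Optional

import requests

def get_with_retries(session: requests.Session, url: str, tries: int = 5,
                     timeout: int = 5) -> Optional[requests.Response]:
    logger = logging.getLogger("page")
    for attempt in range(1, tries + 1):
        try:
            r = session.get(url, timeout=timeout)
        except requests.exceptions.RequestException:
            logger.warning(f"GET {url} raised on attempt {attempt}/{tries}.")
            continue
        if r.status_code == 200:
            return r
        logger.warning(f"Got {r.status_code} at GET {url}. ({attempt}/{tries})")
    # All attempts exhausted: signal failure with None, as the diffed code does.
    logger.warning("Too many tries. Aborting.")
    return None
```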
@@ -102,11 +103,11 @@ class Page:
             return r
 
         if not retry:
-            LOGGER.warning(f"{cls.__name__} responded wit {r.status_code} at POST:{url}. ({trie}-{cls.TRIES})")
-            LOGGER.debug(r.content)
+            cls.LOGGER.warning(f"{cls.__name__} responded with {r.status_code} at POST:{url}. ({trie}-{cls.TRIES})")
+            cls.LOGGER.debug(r.content)
 
         if trie >= cls.TRIES:
-            LOGGER.warning("to many tries. Aborting.")
+            cls.LOGGER.warning("Too many tries. Aborting.")
             return None
 
         return cls.post_request(url=url, json=json, accepted_response_codes=accepted_response_codes, trie=trie + 1)
@@ -414,18 +415,26 @@ class Page:
                 continue
 
             existing_target.copy_content(target)
             return True
 
         sources = song.source_collection.get_sources_from_page(cls.SOURCE_TYPE)
         if len(sources) == 0:
-            return
+            return False
 
         temp_target: Target = Target(
             path=shared.TEMP_DIR,
             file=str(random.randint(0, 999999))
         )
 
-        cls._download_song_to_targets(source=sources[0], target=temp_target)
-        cls._post_process_targets(song, temp_target)
+        success = True
+
+        if not cls._download_song_to_targets(source=sources[0], target=temp_target):
+            success = False
+
+        if not cls._post_process_targets(song, temp_target):
+            success = False
+
+        return success
 
     @classmethod
     def _post_process_targets(cls, song: Song, temp_target: Target):
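The `success` flag in this hunk implements a common shape: run every step even when an earlier one fails (post-processing should still see the temp target), but report overall failure to the caller instead of a bare `return` that yields `None`. A generic sketch of that pattern, with hypothetical names:

```python
from typing import Callable, Iterable

def run_all(steps: Iterable[Callable[[], bool]]) -> bool:
    # Run every step even after a failure -- later steps may do cleanup or
    # post-processing -- but remember whether anything went wrong.
    success = True
    for step in steps:
        if not step():
            success = False
    return success
```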

View File

@@ -9,10 +9,6 @@ from dataclasses import dataclass
 from pathlib import Path
 import random
 
-from ..utils.shared import (
-    ENCYCLOPAEDIA_METALLUM_LOGGER as LOGGER
-)
-
 from .abstract import Page
 from ..objects import (
     DatabaseObject,
@@ -89,6 +85,8 @@ class Musify(Page):
     SOURCE_TYPE = SourcePages.MUSIFY
 
+    LOGGER = LOGGER
+
     @classmethod
     def parse_url(cls, url: str) -> MusifyUrl:
         parsed = urlparse(url)
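Taken together, the class-level `LOGGER` on `Page` and the override on `Musify` let shared base-class code log under each concrete page's logger via `cls.LOGGER`. A minimal sketch of that pattern; the `fetch` method and logger names are illustrative, not from the repository:

```python
import logging

class Page:
    LOGGER = logging.getLogger("page")  # base default; concrete pages override it

    @classmethod
    def fetch(cls, url: str):
        # Reading cls.LOGGER means a call through a subclass logs under
        # that subclass's logger, not the base class's.
        cls.LOGGER.warning(f"{cls.__name__} could not fetch {url}")

class Musify(Page):
    LOGGER = logging.getLogger("musify")

Musify.fetch("https://example.com")  # emits on the "musify" logger
```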