fixed issue with crash on connection exception

commit 77eef63fac
parent 041612e083
@@ -63,16 +63,24 @@ class Target(DatabaseObject):
             with open(copy_to.file_path, "wb") as write_to:
                 write_to.write(read_from.read())

-    def stream_into(self, r: requests.Response):
+    def stream_into(self, r: requests.Response) -> bool:
+        if r is None:
+            return False
+
         self.create_path()

         chunk_size = 1024
         total_size = int(r.headers.get('content-length'))
         initial_pos = 0


         with open(self.file_path,'wb') as f:
-            for chunk in r.iter_content(chunk_size=chunk_size):
-                size = f.write(chunk)
+            try:
+                for chunk in r.iter_content(chunk_size=chunk_size):
+                    size = f.write(chunk)
+            except requests.exceptions.Timeout:
+                shared.DOWNLOAD_LOGGER.error("Stream timed out.")
+                return False

             """
             # doesn't work yet due to
@@ -85,3 +93,5 @@ class Target(DatabaseObject):
                 size = f.write(chunk)
                 pbar.update(size)
             """
+
+        return True
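For reference, a minimal standalone sketch of the streaming pattern these two hunks adopt in `stream_into`: the download loop is wrapped in `try`/`except requests.exceptions.Timeout`, and failure is reported through the boolean return value instead of raising. The function name, logger name, and URL handling below are illustrative stand-ins, not the project's actual `Target` API.

```python
import logging

import requests

logger = logging.getLogger("download-sketch")  # stand-in for shared.DOWNLOAD_LOGGER


def stream_to_file(url: str, file_path: str, chunk_size: int = 1024, timeout: int = 5) -> bool:
    """Stream a URL to disk; return False instead of crashing on network errors."""
    try:
        r = requests.get(url, stream=True, timeout=timeout)
    except requests.exceptions.ConnectionError:
        return False

    if r is None or r.status_code != 200:
        return False

    with open(file_path, "wb") as f:
        try:
            for chunk in r.iter_content(chunk_size=chunk_size):
                f.write(chunk)
        except requests.exceptions.Timeout:
            # mirrors the patch: log the timeout and signal failure to the caller
            logger.error("Stream timed out.")
            return False

    return True
```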
@@ -60,6 +60,7 @@ class Page:
     API_SESSION.proxies = shared.proxies
     TIMEOUT = 5
     TRIES = 5
+    LOGGER = LOGGER

     SOURCE_TYPE: SourcePages

@ -78,11 +79,11 @@ class Page:
|
|||||||
return r
|
return r
|
||||||
|
|
||||||
if not retry:
|
if not retry:
|
||||||
LOGGER.warning(f"{cls.__name__} responded wit {r.status_code} at GET:{url}. ({trie}-{cls.TRIES})")
|
cls.LOGGER.warning(f"{cls.__name__} responded wit {r.status_code} at GET:{url}. ({trie}-{cls.TRIES})")
|
||||||
LOGGER.debug(r.content)
|
cls.LOGGER.debug(r.content)
|
||||||
|
|
||||||
if trie >= cls.TRIES:
|
if trie >= cls.TRIES:
|
||||||
LOGGER.warning("to many tries. Aborting.")
|
cls.LOGGER.warning("to many tries. Aborting.")
|
||||||
return None
|
return None
|
||||||
|
|
||||||
return cls.get_request(url=url, stream=stream, accepted_response_codes=accepted_response_codes, trie=trie + 1)
|
return cls.get_request(url=url, stream=stream, accepted_response_codes=accepted_response_codes, trie=trie + 1)
|
||||||
@@ -102,11 +103,11 @@ class Page:
             return r

         if not retry:
-            LOGGER.warning(f"{cls.__name__} responded wit {r.status_code} at POST:{url}. ({trie}-{cls.TRIES})")
-            LOGGER.debug(r.content)
+            cls.LOGGER.warning(f"{cls.__name__} responded wit {r.status_code} at POST:{url}. ({trie}-{cls.TRIES})")
+            cls.LOGGER.debug(r.content)

         if trie >= cls.TRIES:
-            LOGGER.warning("to many tries. Aborting.")
+            cls.LOGGER.warning("to many tries. Aborting.")
             return None

         return cls.post_request(url=url, json=json, accepted_response_codes=accepted_response_codes, trie=trie + 1)
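The two hunks above only swap the module-level LOGGER for `cls.LOGGER`; the surrounding retry logic is unchanged. As a compact reference for that pattern (bounded recursion on a `trie` counter, class-level logger that subclasses can replace), here is a self-contained sketch; the class name, logger name, session setup, and simplified retry condition are assumptions, not the repository's exact code.

```python
import logging
from typing import Optional

import requests


class PageSketch:
    # class-level defaults; a concrete page can override LOGGER, which is what
    # the `LOGGER = LOGGER` / `cls.LOGGER` changes in this commit enable
    LOGGER = logging.getLogger("page-sketch")
    API_SESSION = requests.Session()
    TIMEOUT = 5
    TRIES = 5

    @classmethod
    def get_request(cls, url: str, accepted_response_codes=frozenset({200}), trie: int = 0) -> Optional[requests.Response]:
        try:
            r = cls.API_SESSION.get(url, timeout=cls.TIMEOUT)
        except requests.exceptions.ConnectionError:
            return None

        if r.status_code in accepted_response_codes:
            return r

        # cls.LOGGER resolves to the subclass's logger once the subclass overrides the attribute
        cls.LOGGER.warning(f"{cls.__name__} responded with {r.status_code} at GET:{url}. ({trie}-{cls.TRIES})")
        cls.LOGGER.debug(r.content)

        if trie >= cls.TRIES:
            cls.LOGGER.warning("Too many tries. Aborting.")
            return None

        # retry the same request with an incremented attempt counter
        return cls.get_request(url=url, accepted_response_codes=accepted_response_codes, trie=trie + 1)
```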
@@ -414,18 +415,26 @@ class Page:
                 continue

             existing_target.copy_content(target)
+            return True

         sources = song.source_collection.get_sources_from_page(cls.SOURCE_TYPE)
         if len(sources) == 0:
-            return
+            return False

         temp_target: Target = Target(
             path=shared.TEMP_DIR,
             file=str(random.randint(0, 999999))
         )

-        cls._download_song_to_targets(source=sources[0], target=temp_target)
-        cls._post_process_targets(song, temp_target)
+        success = True
+
+        if not cls._download_song_to_targets(source=sources[0], target=temp_target):
+            success = False
+
+        if not cls._post_process_targets(song, temp_target):
+            success = False
+
+        return success

     @classmethod
     def _post_process_targets(cls, song: Song, temp_target: Target):
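A reduced sketch of the control flow this hunk introduces: each step reports success as a boolean, the caller folds the results into one `success` flag, and the method returns that flag instead of `None`. The helper functions and temp-directory handling below are placeholders for `_download_song_to_targets` and `_post_process_targets`, not their real signatures.

```python
import random
import tempfile
from pathlib import Path

TEMP_DIR = Path(tempfile.gettempdir())  # stand-in for shared.TEMP_DIR


def download_step(target: Path) -> bool:
    # placeholder for _download_song_to_targets: write an empty file and report success
    target.write_bytes(b"")
    return True


def post_process_step(target: Path) -> bool:
    # placeholder for _post_process_targets: succeed only if the download produced a file
    return target.exists()


def download(sources: list) -> bool:
    if len(sources) == 0:
        # no usable source is now reported as a failure instead of a bare return
        return False

    temp_target = TEMP_DIR / str(random.randint(0, 999999))

    success = True

    if not download_step(temp_target):
        success = False

    if not post_process_step(temp_target):
        success = False

    return success


if __name__ == "__main__":
    print(download(["https://example.com/track"]))  # True with these stubs
```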
@@ -9,10 +9,6 @@ from dataclasses import dataclass
 from pathlib import Path
 import random

-from ..utils.shared import (
-    ENCYCLOPAEDIA_METALLUM_LOGGER as LOGGER
-)
-
 from .abstract import Page
 from ..objects import (
     DatabaseObject,
@ -89,6 +85,8 @@ class Musify(Page):
|
|||||||
|
|
||||||
SOURCE_TYPE = SourcePages.MUSIFY
|
SOURCE_TYPE = SourcePages.MUSIFY
|
||||||
|
|
||||||
|
LOGGER = LOGGER
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def parse_url(cls, url: str) -> MusifyUrl:
|
def parse_url(cls, url: str) -> MusifyUrl:
|
||||||
parsed = urlparse(url)
|
parsed = urlparse(url)
|
||||||
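These last two hunks apply the same convention to Musify that the `Page` hunk above sets up: the base class carries a default `LOGGER` attribute and each concrete page overrides it, so shared helpers that log via `cls.LOGGER` automatically use the page's own logger. A minimal sketch of that lookup (class and logger names here are made up):

```python
import logging


class Page:
    # base default; concrete pages override this attribute
    LOGGER = logging.getLogger("page")

    @classmethod
    def warn(cls, message: str) -> None:
        # cls.LOGGER is looked up on the actual class, so Musify.warn(...) uses Musify's logger
        cls.LOGGER.warning(f"{cls.__name__}: {message}")


class Musify(Page):
    LOGGER = logging.getLogger("musify")


Musify.warn("responded with 503")  # emitted by the "musify" logger
Page.warn("responded with 503")    # emitted by the base "page" logger
```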
|
Loading…
Reference in New Issue
Block a user