updated connecting behaviour

Hellow2 2023-06-16 12:26:02 +02:00
parent f471c6a72b
commit 7277b7e512
3 changed files with 59 additions and 14 deletions


@@ -8,6 +8,7 @@ from tqdm import tqdm
from .rotating import RotatingProxy
from ..utils.shared import PROXIES_LIST, CHUNK_SIZE
from ..utils.support_classes import DownloadResult
from ..objects import Target
@@ -209,8 +210,12 @@ class Connection:
timeout: float = None,
headers: dict = None,
raw_url: bool = False,
chunk_size: int = CHUNK_SIZE,
try_count: int = 0,
progress: int = 0,
**kwargs
):
) -> DownloadResult:
r = self._request(
request=self.session.get,
try_count=0,
@@ -225,10 +230,13 @@ class Connection:
)
if r is None:
return False
return DownloadResult(error_message=f"Could not establish connection to: {url}")
target.create_path()
total_size = int(r.headers.get('content-length'))
progress = 0
retry = False
with target.open("wb") as f:
try:
@@ -236,13 +244,39 @@
https://en.wikipedia.org/wiki/Kilobyte
> The internationally recommended unit symbol for the kilobyte is kB.
"""
with tqdm(total=total_size, unit='B', unit_scale=True, unit_divisor=1024, desc=description) as t:
for chunk in r.iter_content(chunk_size=CHUNK_SIZE):
for chunk in r.iter_content(chunk_size=chunk_size):
size = f.write(chunk)
progress += size
t.update(size)
return True
except requests.exceptions.ConnectionError:
self.LOGGER.error("Stream timed out.")
return False
if try_count >= self.TRIES:
self.LOGGER.warning(f"Stream timed out at \"{url}\": to many retries, aborting.")
return DownloadResult(error_message=f"Stream timed out from {url}, reducing the chunksize might help.")
self.LOGGER.warning(f"Stream timed out at \"{url}\": ({try_count}-{self.TRIES})")
retry = True
finally:
if total_size > progress or retry:
return self.stream_into(
url = url,
target = target,
description = description,
try_count=try_count+1,
progress=progress,
accepted_response_code=accepted_response_codes,
timeout=timeout,
headers=headers,
raw_url=raw_url,
refer_from_origin=refer_from_origin,
chunk_size=chunk_size,
**kwargs
)
return DownloadResult()
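
The retry path above caps the recursion with try_count against self.TRIES before re-calling stream_into. A minimal standalone sketch of that pattern, not the project code: TRIES, CHUNK_SIZE and stream_to_file are made-up names here, and unlike the commit this version restarts from byte zero instead of carrying progress forward.

import requests

TRIES = 5          # assumed retry cap, analogous to Connection.TRIES
CHUNK_SIZE = 1024  # assumed chunk size

def stream_to_file(url: str, path: str, try_count: int = 0) -> bool:
    # Give up once the retry budget is exhausted, mirroring the
    # "too many retries, aborting" branch above.
    if try_count >= TRIES:
        return False
    try:
        with requests.get(url, stream=True, timeout=10) as r, open(path, "wb") as f:
            for chunk in r.iter_content(chunk_size=CHUNK_SIZE):
                f.write(chunk)
        return True
    except requests.exceptions.ConnectionError:
        # Stream timed out: retry with an incremented counter.
        return stream_to_file(url, path, try_count=try_count + 1)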


@@ -394,22 +394,26 @@ class Page:
path=shared.TEMP_DIR,
file=str(random.randint(0, 999999))
)
r = DownloadResult(1)
found_on_disc = False
target: Target
for target in song.target_collection:
if target.exists:
target.copy_content(temp_target)
found_on_disc = True
break
r = DownloadResult(1)
r.found_on_disk += 1
r.add_target(target)
if found_on_disc:
self.LOGGER.info(f"{song.option_string} already exists, thus not downloading again.")
return r
source = sources[0]
if not found_on_disc:
r = self.download_song_to_target(source=source, target=temp_target, desc=song.title)
else:
self.LOGGER.info(f"{song.option_string} already exists, thus not downloading again.")
r = self.download_song_to_target(source=source, target=temp_target, desc=song.title)
if not r.is_fatal_error:
r.merge(self._post_process_targets(song, temp_target, [] if found_on_disc else self.get_skip_intervals(song, source)))
@@ -432,6 +436,7 @@ class Page:
r.add_target(target)
temp_target.delete()
r.sponsor_segments += len(interval_list)
return r
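
The Page change above reuses a file that already exists in one of the song's targets instead of downloading it again, and counts the reuse in found_on_disk. A condensed sketch of that idea, with made-up Result and reuse_or_download names standing in for DownloadResult and the page method; the real code additionally records the target via r.add_target and logs the skip.

import os
import shutil
from dataclasses import dataclass, field
from typing import List

@dataclass
class Result:
    total: int = 0
    found_on_disk: int = 0
    targets: List[str] = field(default_factory=list)

def reuse_or_download(existing_paths: List[str], temp_path: str) -> Result:
    r = Result(total=1)
    for path in existing_paths:
        if os.path.exists(path):
            shutil.copyfile(path, temp_path)  # reuse the copy already on disk
            r.found_on_disk += 1
            r.targets.append(path)
            return r                          # skip the network entirely
    # ...otherwise fall through to the real download into temp_path...
    return r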


@@ -12,8 +12,10 @@ UNIT_DIVISOR = 1024
class DownloadResult:
total: int = 0
fail: int = 0
sponsor_segments: int = 0
error_message: str = None
total_size = 0
found_on_disk: int = 0
_error_message_list: List[str] = field(default_factory=list)
@@ -71,7 +73,9 @@ class DownloadResult:
self.fail += other.fail
self._error_message_list.extend(other._error_message_list)
self.sponsor_segments += other.sponsor_segments
self.total_size += other.total_size
self.found_on_disk += other.found_on_disk
def __str__(self):
if self.is_fatal_error:
@@ -79,7 +83,9 @@ class DownloadResult:
head = f"{self.fail} from {self.total} downloads failed:\n" \
f"successrate:\t{int(self.success_percentage * 100)}%\n" \
f"failrate:\t{int(self.failure_percentage * 100)}%\n" \
f"total size:\t{self.formated_size}"
f"total size:\t{self.formated_size}\n" \
f"skipped segments:\t{self.sponsor_segments}" \
f"found on disc:\t{self.found_on_disk}"
if not self.is_mild_failure:
return head
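
The new sponsor_segments and found_on_disk counters roll up through merge() and end up in the summary string. A small self-contained sketch of that aggregation; Summary is a stand-in, not the real DownloadResult.

from dataclasses import dataclass

@dataclass
class Summary:
    total: int = 0
    fail: int = 0
    sponsor_segments: int = 0
    found_on_disk: int = 0

    def merge(self, other: "Summary") -> None:
        # Accumulate per-song results into one overall summary.
        self.total += other.total
        self.fail += other.fail
        self.sponsor_segments += other.sponsor_segments
        self.found_on_disk += other.found_on_disk

    def __str__(self) -> str:
        return (f"{self.fail} of {self.total} downloads failed\n"
                f"skipped segments:\t{self.sponsor_segments}\n"
                f"found on disc:\t{self.found_on_disk}")

overall = Summary()
overall.merge(Summary(total=1, found_on_disk=1))
overall.merge(Summary(total=1, sponsor_segments=2))
print(overall)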