from __future__ import annotations

import copy
import inspect
import logging
import threading
import time
from typing import List, Dict, Optional, Set
from urllib.parse import urlparse, urlunsplit, ParseResult

import requests
import responses
from tqdm import tqdm

from .cache import Cache
from .rotating import RotatingProxy
from ..objects import Target
from ..utils import request_trace
from ..utils.string_processing import shorten_display_url
from ..utils.config import main_settings
from ..utils.support_classes.download_result import DownloadResult
from ..utils.hacking import merge_args


class Connection:
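    """
    Wrapper around a requests.Session that adds response caching, rotating
    proxies, retries and an optional heartbeat thread.

    A minimal usage sketch; the host, module name and endpoint are made up
    for illustration:

        connection = Connection(host="https://example.com", module="example")
        r = connection.get("https://example.com/api/status", name="status")
        if r is not None:
            print(r.status_code)
    """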

    def __init__(
        self,
        host: str = None,
        proxies: List[dict] = None,
        tries: int = (len(main_settings["proxies"]) + 1) * main_settings["tries_per_proxy"],
        timeout: int = 7,
        logger: logging.Logger = logging.getLogger("connection"),
        header_values: Dict[str, str] = None,
        accepted_response_codes: Set[int] = None,
        semantic_not_found: bool = True,
        sleep_after_404: float = 0.0,
        heartbeat_interval: float = 0,
        module: str = "general",
        cache_expiring_duration: float = 10
    ):
        if proxies is None:
            proxies = main_settings["proxies"]

        self.cache: Cache = Cache(module=module, logger=logger)
        self.cache_expiring_duration = cache_expiring_duration

        self.HEADER_VALUES = dict() if header_values is None else header_values

        self.LOGGER = logger
        self.HOST = host if host is None else urlparse(host)
        self.TRIES = tries
        self.TIMEOUT = timeout
        self.rotating_proxy = RotatingProxy(proxy_list=proxies)

        self.ACCEPTED_RESPONSE_CODES = accepted_response_codes or {200}
        self.SEMANTIC_NOT_FOUND = semantic_not_found
        self.sleep_after_404 = sleep_after_404

        self.session = requests.Session()
        self.session.headers = self.get_header(**self.HEADER_VALUES)
        self.session.proxies = self.rotating_proxy.current_proxy

        self.heartbeat_thread = None
        self.heartbeat_interval = heartbeat_interval

        # plain boolean flag; request() spins on it rather than using a threading.Lock
        self.lock: bool = False

    def start_heartbeat(self):
        if self.heartbeat_interval <= 0:
            self.LOGGER.warning(f"Can't start a heartbeat with {self.heartbeat_interval}s in between.")
            return

        self.heartbeat_thread = threading.Thread(target=self._heartbeat_loop, args=(self.heartbeat_interval,), daemon=True)
        self.heartbeat_thread.start()

    def heartbeat_failed(self):
        self.LOGGER.warning("The heart couldn't beat.")

    def heartbeat(self):
        # Subclasses are expected to implement the actual heartbeat request here.
        raise NotImplementedError("please implement the heartbeat function.")

    def _heartbeat_loop(self, interval: float):
        def heartbeat_wrapper():
            self.LOGGER.debug("The heart is beating.")
            self.heartbeat()

        while True:
            heartbeat_wrapper()
            time.sleep(interval)
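
    # A sketch of wiring the heartbeat up, assuming a hypothetical subclass and
    # keep-alive endpoint (both invented for illustration). is_heartbeat=True
    # keeps the heartbeat request from waiting on the connection lock:
    #
    #     class ExampleConnection(Connection):
    #         def heartbeat(self):
    #             r = self.get("https://example.com/keep-alive", is_heartbeat=True, disable_cache=True)
    #             if r is None:
    #                 self.heartbeat_failed()
    #
    #     connection = ExampleConnection(heartbeat_interval=30)
    #     connection.start_heartbeat()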

    def base_url(self, url: ParseResult = None):
        if url is None and self.HOST is not None:
            url = self.HOST

        return urlunsplit((url.scheme, url.netloc, "", "", ""))

    def get_header(self, **header_values) -> Dict[str, str]:
        headers = {
            # both spellings are kept on purpose: the session's header mapping is
            # replaced with a plain, case-sensitive dict in __init__
            "user-agent": main_settings["user_agent"],
            "User-Agent": main_settings["user_agent"],
            "Connection": "keep-alive",
            "Accept-Language": main_settings["language"],
        }

        if self.HOST is not None:
            # headers["Host"] = self.HOST.netloc
            headers["Referer"] = self.base_url(url=self.HOST)

        headers.update(header_values)

        return headers
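
    # For illustration, assuming main_settings["user_agent"] == "Example/1.0"
    # and main_settings["language"] == "en-US,en" (both values invented), a
    # Connection constructed with host="https://example.com" would build
    # headers roughly like:
    #
    #     {
    #         "user-agent": "Example/1.0",
    #         "User-Agent": "Example/1.0",
    #         "Connection": "keep-alive",
    #         "Accept-Language": "en-US,en",
    #         "Referer": "https://example.com",
    #     }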

    def rotate(self):
        self.session.proxies = self.rotating_proxy.rotate()

    def _update_headers(
        self,
        headers: Optional[dict],
        refer_from_origin: bool,
        url: ParseResult
    ) -> Dict[str, str]:
        headers = self.get_header(**(headers or {}))
        if not refer_from_origin:
            headers["Referer"] = self.base_url(url=url)

        return headers

    def save(self, r: requests.Response, name: str, error: bool = False, no_update_if_valid_exists: bool = False, **kwargs):
        n_kwargs = {}
        if error:
            n_kwargs["module"] = "failed_requests"

        if no_update_if_valid_exists and self.cache.get(name) is not None:
            return

        self.cache.set(r.content, name, expires_in=kwargs.get("expires_in", self.cache_expiring_duration), additional_info={
            "encoding": r.encoding,
        }, **n_kwargs)
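
    # For illustration (names invented): save() stores the raw bytes together
    # with the response encoding, so request() can later decode the payload and
    # replay it through responses.RequestsMock as a normal requests.Response.
    #
    #     connection.save(r, "status", expires_in=60)  # expires_in is read from **kwargs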

    def request(
        self,
        url: str,
        timeout: float = None,
        headers: Optional[dict] = None,
        try_count: int = 0,
        accepted_response_codes: set = None,
        refer_from_origin: bool = True,
        raw_url: bool = False,
        raw_headers: bool = False,
        sleep_after_404: float = None,
        is_heartbeat: bool = False,
        disable_cache: bool = None,
        enable_cache_readonly: bool = False,
        method: str = None,
        name: str = "",
        exclude_headers: List[str] = None,
        **kwargs
    ) -> Optional[requests.Response]:
        if method is None:
            raise AttributeError("method is not set.")
        method = method.upper()
        headers = dict() if headers is None else headers
        disable_cache = (headers.get("Cache-Control", "").lower() == "no-cache" if disable_cache is None else disable_cache) or kwargs.get("stream", False)
        accepted_response_codes = self.ACCEPTED_RESPONSE_CODES if accepted_response_codes is None else accepted_response_codes

        # snapshot the current arguments so a retry can re-invoke this method
        current_kwargs = copy.copy(locals())
        current_kwargs.pop("kwargs")
        current_kwargs.update(**kwargs)

        parsed_url = urlparse(url)
        trace_string = f"{method} {shorten_display_url(url)} \t{'[stream]' if kwargs.get('stream', False) else ''}"

        if not raw_headers:
            _headers = copy.copy(self.HEADER_VALUES)
            _headers.update(headers)

            headers = self._update_headers(
                headers=_headers,
                refer_from_origin=refer_from_origin,
                url=parsed_url
            )
        else:
            headers = headers or {}

        request_url = parsed_url.geturl() if not raw_url else url

        if name != "" and (not disable_cache or enable_cache_readonly):
            cached = self.cache.get(name)

            if cached is not None:
                request_trace(f"{trace_string}\t[cached]")

                with responses.RequestsMock() as resp:
                    additional_info = cached.attribute.additional_info

                    body = cached.content
                    if additional_info.get("encoding", None) is not None:
                        body = body.decode(additional_info["encoding"])

                    resp.add(
                        method=method,
                        url=request_url,
                        body=body,
                    )
                    return requests.request(method=method, url=url, timeout=timeout, headers=headers, **kwargs)

        if sleep_after_404 is None:
            sleep_after_404 = self.sleep_after_404
        if try_count >= self.TRIES:
            return

        if timeout is None:
            timeout = self.TIMEOUT

        for header in exclude_headers or []:
            if header in headers:
                del headers[header]

        if try_count <= 0:
            request_trace(trace_string)

        r = None
        try:
            if self.lock:
                self.LOGGER.info("Waiting for the heartbeat to finish.")
                while self.lock and not is_heartbeat:
                    # don't spin at full CPU while waiting for the lock
                    time.sleep(0.1)

            self.lock = True
            r: requests.Response = self.session.request(method=method, url=url, timeout=timeout, headers=headers, **kwargs)

            if r.status_code in accepted_response_codes:
                if not disable_cache and name != "":
                    self.save(r, name, **kwargs)
                return r

        # the server rejected the request, or the connection dropped
        except requests.exceptions.Timeout:
            self.LOGGER.warning(f"Request timed out at \"{request_url}\": ({try_count}-{self.TRIES})")
        except requests.exceptions.ConnectionError:
            self.LOGGER.warning(f"Couldn't connect to \"{request_url}\": ({try_count}-{self.TRIES})")

        # always release the lock, even if the request raised
        finally:
            self.lock = False

        if r is None:
            self.LOGGER.warning(f"{parsed_url.netloc} didn't respond at {url}. ({try_count}-{self.TRIES})")
            self.LOGGER.debug("request headers:\n\t" + "\n\t".join(f"{k}\t=\t{v}" for k, v in headers.items()))
        else:
            self.LOGGER.warning(f"{parsed_url.netloc} responded with {r.status_code} at {url}. ({try_count}-{self.TRIES})")
            self.LOGGER.debug("request headers:\n\t" + "\n\t".join(f"{k}\t=\t{v}" for k, v in r.request.headers.items()))
            self.LOGGER.debug("response headers:\n\t" + "\n\t".join(f"{k}\t=\t{v}" for k, v in r.headers.items()))
            self.LOGGER.debug(r.content)

            if name != "":
                self.save(r, name, error=True, **kwargs)

            if self.SEMANTIC_NOT_FOUND and r.status_code == 404:
                return None

        if sleep_after_404 != 0:
            self.LOGGER.warning(f"Waiting for {sleep_after_404} seconds.")
            time.sleep(sleep_after_404)

        self.rotate()

        current_kwargs["try_count"] = current_kwargs.get("try_count", 0) + 1
        return Connection.request(**current_kwargs)
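
    # A usage sketch of the retry behaviour (endpoint and cache name invented):
    # the first call hits the network and caches the body under "tracks"; a
    # later call with the same name is replayed from the cache; on timeouts or
    # connection errors the request is retried up to self.TRIES times, rotating
    # the proxy in between, and None is returned once every attempt has failed.
    #
    #     r = connection.get("https://example.com/api/tracks", name="tracks")
    #     r = connection.get("https://example.com/api/tracks", name="tracks")  # cache hit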

    @merge_args(request)
    def get(self, *args, **kwargs) -> Optional[requests.Response]:
        return self.request(
            *args,
            method="GET",
            **kwargs
        )

    @merge_args(request)
    def post(
        self,
        *args,
        json: dict = None,
        **kwargs
    ) -> Optional[requests.Response]:
        r = self.request(
            *args,
            method="POST",
            json=json,
            **kwargs
        )
        if r is None:
            self.LOGGER.warning(f"payload: {json}")
        return r
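
    # For example (endpoint and payload invented for illustration):
    #
    #     r = connection.post(
    #         "https://example.com/api/search",
    #         json={"query": "artist name"},
    #         name="search_artist",
    #     )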

    @merge_args(request)
    def stream_into(
        self,
        url: str,
        target: Target,
        name: str = "download",
        chunk_size: int = main_settings["chunk_size"],
        progress: int = 0,
        method: str = "GET",
        try_count: int = 0,
        accepted_response_codes: set = None,
        **kwargs
    ) -> DownloadResult:
        # copy the default set, because a retry adds 206 (partial content) to it
        accepted_response_codes = set(self.ACCEPTED_RESPONSE_CODES) if accepted_response_codes is None else accepted_response_codes
        stream_kwargs = copy.copy(locals())
        stream_kwargs.update(stream_kwargs.pop("kwargs"))

        if "description" in kwargs:
            name = kwargs.pop("description")

        if progress > 0:
            # setdefault keeps the dict inside kwargs, so the Range header is actually sent
            headers = kwargs.setdefault("headers", dict())
            headers["Range"] = f"bytes={target.size}-"

        r = self.request(
            url=url,
            name=name,
            method=method,
            stream=True,
            accepted_response_codes=accepted_response_codes,
            **kwargs
        )

        if r is None:
            return DownloadResult(error_message=f"Could not establish a stream from: {url}")

        target.create_path()
        total_size = int(r.headers.get("Content-Length", chunk_size))
        progress = 0

        retry = False

        with target.open("ab") as f:
            """
            https://en.wikipedia.org/wiki/Kilobyte
            > The internationally recommended unit symbol for the kilobyte is kB.
            """

            with tqdm(total=total_size, initial=target.size, unit='B', unit_scale=True, unit_divisor=1024, desc=name) as t:
                try:
                    for chunk in r.iter_content(chunk_size=chunk_size):
                        size = f.write(chunk)
                        progress += size
                        t.update(size)

                except (requests.exceptions.ConnectionError, requests.exceptions.Timeout, requests.exceptions.ChunkedEncodingError):
                    if try_count >= self.TRIES:
                        self.LOGGER.warning(f"Stream timed out at \"{url}\": too many retries, aborting.")
                        return DownloadResult(error_message=f"Stream timed out from {url}, reducing the chunk_size might help.")

                    self.LOGGER.warning(f"Stream timed out at \"{url}\": ({try_count}-{self.TRIES})")
                    retry = True
                    try_count += 1

            if total_size > progress:
                retry = True

        if retry:
            self.LOGGER.warning("Retrying stream...")
            accepted_response_codes.add(206)
            stream_kwargs["progress"] = progress
            # pass the incremented counter on, otherwise the retry loop never terminates
            stream_kwargs["try_count"] = try_count
            return Connection.stream_into(**stream_kwargs)

        return DownloadResult()
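
# A download sketch with an invented URL and a hypothetical Target pointing at
# a local file; on connection drops the stream resumes with an HTTP Range
# header, and a DownloadResult describing the outcome is returned:
#
#     connection = Connection(host="https://example.com")
#     result = connection.stream_into(
#         "https://example.com/files/track.mp3",
#         target,
#         description="track.mp3",
#     )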