Compare commits

2 Commits

Author SHA1 Message Date
0b216b7d40 fix: intermediate transport commit 2024-04-16 17:50:01 +02:00
71d582a17c fix: improved debug for requests 2024-04-16 14:19:07 +02:00
6 changed files with 44 additions and 45 deletions

View File

@@ -26,6 +26,7 @@
"OKBLUE",
"Referer",
"tracksort",
"unmap",
"youtube"
]
}

View File

@@ -6,8 +6,8 @@ logging.getLogger().setLevel(logging.DEBUG)
if __name__ == "__main__":
commands = [
"s: #a And End...",
"d: 10",
"s: #a Ghost Bath",
"4",
]

View File

@@ -101,7 +101,7 @@ class Connection:
}
if self.HOST is not None:
headers["Host"] = self.HOST.netloc
# headers["Host"] = self.HOST.netloc
headers["Referer"] = self.base_url(url=self.HOST)
headers.update(header_values)
@@ -215,10 +215,6 @@ class Connection:
self.save(r, name, **kwargs)
return r
if self.SEMANTIC_NOT_FOUND and r.status_code == 404:
self.LOGGER.warning(f"Couldn't find url (404): {request_url}")
return None
# the server rejected the request, or the internet is lacking
except requests.exceptions.Timeout:
self.LOGGER.warning(f"Request timed out at \"{request_url}\": ({try_count}-{self.TRIES})")
@@ -231,9 +227,11 @@ class Connection:
finally:
self.lock = False
if not connection_failed:
if r is None:
self.LOGGER.warning(f"{self.HOST.netloc} didn't respond at {url}. ({try_count}-{self.TRIES})")
self.LOGGER.debug("request headers:\n\t"+ "\n\t".join(f"{k}\t=\t{v}" for k, v in headers.items()))
else:
self.LOGGER.warning(f"{self.HOST.netloc} responded wit {r.status_code} at {url}. ({try_count}-{self.TRIES})")
if r is not None:
self.LOGGER.debug("request headers:\n\t"+ "\n\t".join(f"{k}\t=\t{v}" for k, v in r.request.headers.items()))
self.LOGGER.debug("response headers:\n\t"+ "\n\t".join(f"{k}\t=\t{v}" for k, v in r.headers.items()))
self.LOGGER.debug(r.content)
@@ -241,6 +239,9 @@ class Connection:
if name != "":
self.save(r, name, error=True, **kwargs)
if self.SEMANTIC_NOT_FOUND and r.status_code == 404:
return None
if sleep_after_404 != 0:
self.LOGGER.warning(f"Waiting for {sleep_after_404} seconds.")
time.sleep(sleep_after_404)
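
The Connection hunks above are the "improved debug for requests" change: when a request fails, the connection now distinguishes between getting no response at all (it logs the headers it intended to send) and getting an error status (it logs the headers that were actually sent via r.request.headers, plus the response headers and body). A minimal standalone sketch of the same pattern, assuming plain requests and logging; the fetch helper, logger name and retry bookkeeping here are illustrative, not the project's Connection API:

import logging

import requests

logging.basicConfig(level=logging.DEBUG)
LOGGER = logging.getLogger("connection")


def _dump_headers(label: str, headers) -> None:
    # same indented key/value layout the diff uses for its debug output
    LOGGER.debug(label + ":\n\t" + "\n\t".join(f"{k}\t=\t{v}" for k, v in headers.items()))


def fetch(url: str, headers: dict, tries: int = 3, timeout: float = 10.0):
    for try_count in range(1, tries + 1):
        r = None
        try:
            r = requests.get(url, headers=headers, timeout=timeout)
            if r.ok:
                return r
        except requests.exceptions.Timeout:
            LOGGER.warning(f'Request timed out at "{url}": ({try_count}-{tries})')
        except requests.exceptions.ConnectionError:
            LOGGER.warning(f'Could not connect to "{url}": ({try_count}-{tries})')
            continue  # no response object to inspect, go straight to the next try

        if r is None:
            # the server never answered: log the headers we meant to send
            LOGGER.warning(f"no response from {url} ({try_count}-{tries})")
            _dump_headers("request headers", headers)
        else:
            # the server answered with an error: log what was actually sent
            # (requests may add cookies, Content-Length, ...) and what came back
            LOGGER.warning(f"{url} responded with {r.status_code} ({try_count}-{tries})")
            _dump_headers("request headers", r.request.headers)
            _dump_headers("response headers", r.headers)
            LOGGER.debug(r.content)
    return None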

View File

@@ -110,7 +110,7 @@ class Collection(Generic[T]):
if self._contained_in_self(__object):
return [self]
for collection in (*self.children, *self.parents):
for collection in self.children:
results.extend(collection._contained_in_sub(__object, break_at_first=break_at_first))
if break_at_first:
@@ -198,7 +198,7 @@ class Collection(Generic[T]):
if value in self._indexed_values[name]:
return self._indexed_to_objects[value][0]
def _find_object(self, __object: T) -> Tuple[Collection[T], Optional[T]]:
def _find_object(self, __object: T, no_sibling: bool = False) -> Tuple[Collection[T], Optional[T]]:
other_object = self._find_object_in_self(__object)
if other_object is not None:
return self, other_object
@@ -208,6 +208,19 @@ class Collection(Generic[T]):
if other_object is not None:
return o, other_object
if no_sibling:
return self, None
# find in siblings and all children of siblings
for parent in self.parents:
for sibling in parent.children:
if sibling is self:
continue
o, other_object = sibling._find_object(__object, no_sibling=True)
if other_object is not None:
return o, other_object
return self, None
def append(self, __object: Optional[T], already_is_parent: bool = False, from_map: bool = False):
@@ -229,33 +242,14 @@ class Collection(Generic[T]):
if existing_object is None:
# append
# print("appending", existing_object, __object)
append_to._data.append(__object)
append_to._map_element(__object, from_map=from_map)
else:
# merge
append_to._unmap_element(existing_object)
existing_object.merge(__object)
append_to._map_element(existing_object, from_map=from_map)
append_to._map_element(__object, from_map=from_map)
"""
exists_in_collection = self._contained_in_sub(__object)
if len(exists_in_collection) and self is exists_in_collection[0]:
# assuming that the object already is contained in the correct collections
if not already_is_parent:
self.merge_into_self(__object, from_map=from_map)
return
if not len(exists_in_collection):
self._append(__object, from_map=from_map)
else:
exists_in_collection[0].merge_into_self(__object, from_map=from_map)
if not already_is_parent or not self._is_root:
for parent_collection in self._get_parents_of_multiple_contained_children(__object):
pass
parent_collection.append(__object, already_is_parent=True, from_map=from_map)
"""
def extend(self, __iterable: Optional[Iterable[T]], from_map: bool = False):
if __iterable is None:
@@ -296,8 +290,7 @@ class Collection(Generic[T]):
@property
def data(self) -> List[T]:
return [*self._data,
*(__object for collection in self.children for __object in collection.shallow_list)]
return list(i for i in self.__iter__())
def __len__(self) -> int:
return len(self._data) + sum(len(collection) for collection in self.children)
@@ -306,13 +299,17 @@ class Collection(Generic[T]):
def empty(self) -> bool:
return self.__len__() <= 0
def __iter__(self) -> Iterator[T]:
def __iter__(self, finished_ids: set = None) -> Iterator[T]:
_finished_ids = finished_ids or set()
for element in self._data:
if element.id in _finished_ids:
continue
_finished_ids.add(element.id)
yield element
for c in self.children:
for element in c:
yield element
yield from c.__iter__(finished_ids=finished_ids)
def __merge__(self, __other: Collection, override: bool = False):
self.extend(__other._data, from_map=True)
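
The Collection hunks change how the nested collections are searched and walked: _find_object can now also look through sibling collections (other children of the same parents, with no_sibling preventing the search from bouncing back), and __iter__ threads a set of already-yielded ids through the recursion so an element reachable through more than one child is only returned once; the data property and __len__ sit on top of that iterator. A minimal sketch of both ideas under simplified assumptions; Item, find and the private _iter helper are illustrative stand-ins, not the project's classes:

from dataclasses import dataclass
from typing import Iterator, List, Optional, Set


@dataclass
class Item:
    id: int     # stable identifier used for de-duplication
    name: str


class Collection:
    def __init__(self, data: Optional[List[Item]] = None):
        self._data: List[Item] = list(data or [])
        self.children: List["Collection"] = []
        self.parents: List["Collection"] = []

    def add_child(self, child: "Collection") -> "Collection":
        self.children.append(child)
        child.parents.append(self)
        return child

    def __iter__(self) -> Iterator[Item]:
        # public entry point: start every walk with a fresh "already yielded" set
        yield from self._iter(set())

    def _iter(self, finished_ids: Set[int]) -> Iterator[Item]:
        # own elements first, skipping anything yielded earlier in this walk
        for element in self._data:
            if element.id in finished_ids:
                continue
            finished_ids.add(element.id)
            yield element
        # recurse with the *same* set, so an element reachable through
        # several children is only yielded once
        for child in self.children:
            yield from child._iter(finished_ids)

    def find(self, item: Item, no_sibling: bool = False) -> Optional[Item]:
        # search self and all children first
        for candidate in self:
            if candidate.id == item.id:
                return candidate
        if no_sibling:
            return None
        # then the siblings (other children of the same parents); no_sibling=True
        # keeps a sibling from bouncing back into its own siblings
        for parent in self.parents:
            for sibling in parent.children:
                if sibling is self:
                    continue
                found = sibling.find(item, no_sibling=True)
                if found is not None:
                    return found
        return None


if __name__ == "__main__":
    root = Collection([Item(1, "root element")])
    left = root.add_child(Collection([Item(2, "left")]))
    right = root.add_child(Collection([Item(3, "right")]))
    shared = Collection([Item(4, "shared")])
    left.add_child(shared)
    right.add_child(shared)

    print([i.name for i in root])       # "shared" shows up exactly once
    print(left.find(Item(3, "right")))  # found through the sibling search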

View File

@@ -299,7 +299,7 @@ class Album(Base):
@property
def option_string(self) -> str:
return f"{self.__repr__()} " \
f"by Artist({OPTION_STRING_DELIMITER.join([artist.name for artist in self.artist_collection])}) " \
f"by Artist({OPTION_STRING_DELIMITER.join([artist.name + str(artist.id) for artist in self.artist_collection])}) " \
f"under Label({OPTION_STRING_DELIMITER.join([label.name for label in self.label_collection])})"
@property
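
The Album.option_string tweak appends each artist's id to the name, so two artists that merely share a name stay distinguishable in the interactive option listing while the merging above is being debugged. A tiny sketch of the resulting string, with made-up Artist/Album stand-ins and an assumed OPTION_STRING_DELIMITER of ", ":

OPTION_STRING_DELIMITER = ", "


class Artist:
    def __init__(self, id: int, name: str):
        self.id = id
        self.name = name


class Album:
    def __init__(self, title: str, artists: list):
        self.title = title
        self.artist_collection = artists

    @property
    def option_string(self) -> str:
        # the appended id keeps same-named artists distinguishable in the option list
        artists = OPTION_STRING_DELIMITER.join(
            artist.name + str(artist.id) for artist in self.artist_collection
        )
        return f"Album({self.title}) by Artist({artists})"


print(Album("Demo", [Artist(4, "Ghost Bath"), Artist(7, "Ghost Bath")]).option_string)
# Album(Demo) by Artist(Ghost Bath4, Ghost Bath7)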

View File

@@ -13,7 +13,7 @@ if not load_dotenv(Path(__file__).parent.parent.parent / ".env"):
__stage__ = os.getenv("STAGE", "prod")
DEBUG = (__stage__ == "dev") and True
DEBUG_LOGGING = DEBUG and False
DEBUG_LOGGING = DEBUG and True
DEBUG_TRACE = DEBUG and True
DEBUG_OBJECT_TRACE = DEBUG and False
DEBUG_YOUTUBE_INITIALIZING = DEBUG and False
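
The last hunk only flips DEBUG_LOGGING from "DEBUG and False" to "DEBUG and True". Every DEBUG_* switch is and-ed with DEBUG, which is itself derived from a single STAGE variable read from a .env file, so toggling the trailing literal turns one debug feature on or off without any chance of it leaking into a prod deployment. A minimal sketch of the pattern, assuming python-dotenv and the variable names shown (the .env path here is illustrative):

import os
from pathlib import Path

from dotenv import load_dotenv  # python-dotenv

# the .env file is optional; without it STAGE falls back to "prod"
load_dotenv(Path(__file__).parent / ".env")

__stage__ = os.getenv("STAGE", "prod")

DEBUG = (__stage__ == "dev") and True   # master switch, never true outside dev
DEBUG_LOGGING = DEBUG and True          # per-feature switches are and-ed with DEBUG,
DEBUG_TRACE = DEBUG and True            # so flipping the trailing literal can only
DEBUG_OBJECT_TRACE = DEBUG and False    # ever have an effect in the dev stage

if __name__ == "__main__":
    print(f"stage={__stage__} logging={DEBUG_LOGGING} trace={DEBUG_TRACE}")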