fixed really disgusting bug
parent a20f7ae59e
commit f58685af58
@@ -9,6 +9,7 @@ from .parents import DatabaseObject
 class AppendResult:
     was_in_collection: bool
     current_element: DatabaseObject
+    was_the_same: bool


 class Collection:
@@ -82,12 +83,17 @@ class Collection:
         if self.element_type is not None and not isinstance(element, self.element_type):
             raise TypeError(f"{type(element)} is not the set type {self.element_type}")

+        # return if the same instance of the object is in the list
+        for existing in self._data:
+            if element is existing:
+                return AppendResult(True, element, True)
+
         for name, value in element.indexing_values:
             if value in self._attribute_to_object_map[name]:
                 existing_object = self._attribute_to_object_map[name][value]

                 if not merge_on_conflict:
-                    return AppendResult(True, existing_object)
+                    return AppendResult(True, existing_object, False)

                 # if the object does already exist
                 # thus merging and don't add it afterwards
@@ -95,7 +101,7 @@ class Collection:
                 existing_object.merge(element)
                 # in case any relevant data has been added (e.g. it remaps the old object)
                 self.map_element(existing_object)
-                return AppendResult(True, existing_object)
+                return AppendResult(True, existing_object, False)

             element.merge(existing_object)
@@ -104,12 +110,12 @@ class Collection:

                 self.unmap_element(existing_object)
                 self.map_element(element)
-                return AppendResult(True, existing_object)
+                return AppendResult(True, existing_object, False)

         self._data.append(element)
         self.map_element(element)

-        return AppendResult(False, element)
+        return AppendResult(False, element, False)

     def extend(self, element_list: Iterable[DatabaseObject], merge_on_conflict: bool = True,
                merge_into_existing: bool = True):
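For context, a minimal self-contained sketch of how the new three-field AppendResult is meant to be read by callers. The dataclass and the describe() helper below are illustrative stand-ins for the real Collection / DatabaseObject types, not the project's actual classes:

# Illustrative sketch only: simplified stand-ins, not the real classes.
from dataclasses import dataclass
from typing import Any

@dataclass
class AppendResult:
    was_in_collection: bool   # an equivalent object was already in the collection
    current_element: Any      # the object that now represents the appended element
    was_the_same: bool        # the exact same instance was already in the collection

def describe(result: AppendResult) -> str:
    if result.was_the_same:
        return "same instance already present, nothing to merge or clean"
    if result.was_in_collection:
        return f"merged into existing object {result.current_element!r}"
    return f"added new object {result.current_element!r}"

print(describe(AppendResult(True, "song", True)))
print(describe(AppendResult(True, "song", False)))
print(describe(AppendResult(False, "song", False)))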
@@ -223,6 +223,8 @@ class Page:

         cls._clean_music_object(new_music_object, collections)

+        print(collections[Album])
+
         music_object.merge(new_music_object)

         music_object.compile(merge_into=True)
@@ -280,13 +282,11 @@ class Page:
             return

         for i, element in enumerate(collection):
-            r = collection_dict[collection.element_type].append(element)
-            if not r.was_in_collection:
-                cls._clean_music_object(r.current_element, collection_dict)
-                continue
-
+            r = collection_dict[collection.element_type].append(element, merge_into_existing=True)
             collection[i] = r.current_element
-            cls._clean_music_object(r.current_element, collection_dict)
+
+            if not r.was_the_same:
+                cls._clean_music_object(r.current_element, collection_dict)

     @classmethod
     def _clean_label(cls, label: Label, collections: Dict[Union[Type[Song], Type[Album], Type[Artist], Type[Label]], Collection]):
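As a rough illustration of the loop change above: the cleaning step now keys off was_the_same instead of was_in_collection, so an element that is literally the same instance is not cleaned again. The helper below uses hypothetical names (dedup_and_clean, clean) and assumes an append() that returns the three-field AppendResult:

def dedup_and_clean(collection, target, clean):
    # `collection` is the list being walked, `target` the deduplicating
    # Collection, and `clean` the per-element post-processing callback.
    for i, element in enumerate(collection):
        result = target.append(element, merge_into_existing=True)
        collection[i] = result.current_element  # keep the canonical instance

        # only post-process when the canonical instance is a different object;
        # re-cleaning the identical instance would only repeat the same work
        if not result.was_the_same:
            clean(result.current_element)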
@@ -565,6 +565,7 @@ class Musify(Page):
         for card_soup in soup.find_all("div", {"class": "card"}):
             new_album: Album = cls.parse_album_card(card_soup, artist_name)
             album_source: Source

+            if stop_at_level > 1:
                 for album_source in new_album.source_collection.get_sources_from_page(cls.SOURCE_TYPE):
                     new_album.merge(cls._fetch_album_from_source(album_source, stop_at_level=stop_at_level-1))
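The stop_at_level check is a depth limit on the recursive album fetching; a toy sketch of the pattern follows, where fetch_children is an illustrative placeholder rather than part of the real Page/Musify API:

def fetch_children(node: str) -> list:
    # placeholder: a real implementation would scrape the page for child entries
    return []

def fetch_tree(node: str, stop_at_level: int) -> dict:
    data = {"name": node, "children": []}
    # only recurse while there is depth budget left, shrinking it each level
    if stop_at_level > 1:
        for child in fetch_children(node):
            data["children"].append(fetch_tree(child, stop_at_level - 1))
    return data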
@@ -12,6 +12,10 @@ def fetch_artist():
         source_list=[objects.Source(objects.SourcePages.MUSIFY, "https://musify.club/artist/psychonaut-4-83193")]
     )

+    artist = objects.Artist(
+        source_list=[objects.Source(objects.SourcePages.MUSIFY, "https://musify.club/artist/ghost-bath-280348/")]
+    )
+
     artist = Musify.fetch_details(artist)
     print(artist.options)
@@ -33,4 +37,4 @@ def fetch_album():
     print(artist.id, artist.name)

 if __name__ == "__main__":
-    search()
+    fetch_artist()