feat: migrated fetch details and from source
All checks were successful
ci/woodpecker/push/woodpecker Pipeline was successful
@@ -189,103 +189,7 @@ class Page:

    def song_search(self, song: Song) -> List[Song]:
        return []
    def fetch_details(
        self,
        music_object: DatabaseObject,
        stop_at_level: int = 1,
    ) -> DatabaseObject:
        """
        When a music object with incomplete data is passed in, this returns
        the SAME object **(no copy)** enriched with more detailed data.
        For example, if you pass in an album, it fetches the tracklist.

        :param music_object: the object to enrich in place
        :param stop_at_level:
            The depth to which the scraper will recurse.
            If this is set to 2, for example, the levels could be:
            1. Level: the album
            2. Level: every song of the album + every artist of the album
            If no additional requests are needed to get the data one level
            below the supposed stop level, this limit is ignored.
        :return detailed_music_object: IT MODIFIES THE INPUT OBJECT
        """
        # collect the fetched data in a new object of the same type,
        # then merge it back into the object that was passed in
        new_music_object: Optional[DatabaseObject] = None
        fetched_from_url: List[str] = []

        # only certain database objects have a source list
        if isinstance(music_object, INDEPENDENT_DB_OBJECTS):
            source: Source
            for source in music_object.source_collection.get_sources(self.SOURCE_TYPE):
                if music_object.already_fetched_from(source.hash_url):
                    continue

                tmp = self.fetch_object_from_source(
                    source=source,
                    enforce_type=type(music_object),
                    stop_at_level=stop_at_level,
                    type_string=type(music_object).__name__,
                    entity_string=music_object.option_string,
                )

                if new_music_object is None:
                    new_music_object = tmp
                else:
                    new_music_object.merge(tmp)
                fetched_from_url.append(source.hash_url)

        if new_music_object is not None:
            music_object.merge(new_music_object)

        music_object.mark_as_fetched(*fetched_from_url)
        return music_object
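The docstring above describes the in-place enrichment and the meaning of stop_at_level. A minimal usage sketch, assuming a concrete Page subclass and that Album and Source can be constructed this way; the class name and URL are illustrative placeholders, not part of this commit:

    # Hypothetical usage; MyPage, the URL, and the constructor arguments are assumptions.
    page = MyPage()
    album = Album(source_list=[Source(page.SOURCE_TYPE, "https://example.com/album/123")])

    # stop_at_level=2: level 1 is the album itself, level 2 its songs and artists.
    detailed = page.fetch_details(album, stop_at_level=2)

    # the SAME object is returned (no copy), so the input is modified in place
    assert detailed is album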
    def fetch_object_from_source(
        self,
        source: Source,
        stop_at_level: int = 2,
        enforce_type: Optional[Type[DatabaseObject]] = None,
        type_string: str = "",
        entity_string: str = "",
    ) -> Optional[DatabaseObject]:

        obj_type = self.get_source_type(source)

        if obj_type is None:
            return None

        if enforce_type is not None and enforce_type != obj_type:
            self.LOGGER.warning(f"Object type isn't the type to enforce: {enforce_type}, {obj_type}")
            return None

        music_object: Optional[DatabaseObject] = None

        # dispatch to the fetch implementation that matches the resolved object type
        fetch_map = {
            Song: self.fetch_song,
            Album: self.fetch_album,
            Artist: self.fetch_artist,
            Label: self.fetch_label,
        }

        if obj_type in fetch_map:
            music_object = fetch_map[obj_type](source, stop_at_level=stop_at_level)
        else:
            self.LOGGER.warning(f"Can't fetch details of type: {obj_type}")
            return None

        if stop_at_level > 0:
            trace(f"fetching {type_string} [{entity_string}] [stop_at_level={stop_at_level}]")

            # recurse into every downwards collection (e.g. the songs of an album)
            # with a decremented depth limit
            collection: Collection
            for collection_str in music_object.DOWNWARDS_COLLECTION_STRING_ATTRIBUTES:
                collection = music_object.__getattribute__(collection_str)

                for sub_element in collection:
                    sub_element.merge(
                        self.fetch_details(sub_element, stop_at_level=stop_at_level - 1))

        return music_object
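For illustration (not part of the diff), a hedged sketch of calling fetch_object_from_source directly, reusing the hypothetical page and URL from above; enforce_type makes the call return None if get_source_type resolves the URL to anything other than Album:

    # Hypothetical usage; the URL is a placeholder and the Source constructor is assumed.
    source = Source(page.SOURCE_TYPE, "https://example.com/album/123")

    # returns an Album, or None if the URL maps to a different type or is unknown
    album = page.fetch_object_from_source(source, enforce_type=Album, stop_at_level=1)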
    # stubs to fetch the individual object types
    def fetch_song(self, source: Source, stop_at_level: int = 1) -> Song:
        return Song()
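fetch_song above is only a stub returning an empty Song, so the real work presumably happens in page-specific overrides. A hedged sketch of what such an override might look like; the class name, the connection helper, and the _parse_song_page method are assumptions, not code from this repository:

    class ExamplePage(Page):
        # Hypothetical override; the parsing helper below does not exist in the repo.
        def fetch_song(self, source: Source, stop_at_level: int = 1) -> Song:
            response = self.connection.get(source.url)  # assumes the page exposes an HTTP helper
            if response is None:
                return Song()
            return self._parse_song_page(response.text)  # hypothetical parser building a Song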