from __future__ import annotations

import random
from collections import defaultdict
from typing import Optional, Dict, Tuple, List, Type, Generic, TypeVar, Any
from dataclasses import dataclass

from .metadata import Metadata
from .option import Options
from ..utils.shared import HIGHEST_ID
from ..utils.config import main_settings, logging_settings
from ..utils.support_classes.hacking import MetaClass

LOGGER = logging_settings["object_logger"]

P = TypeVar('P')


@dataclass
class StaticAttribute(Generic[P]):
    name: str

    default_value: Any = None
    weight: float = 0

    is_collection: bool = False
    is_downwards_collection: bool = False
    is_upwards_collection: bool = False
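
# Usage sketch (illustrative only, not used by the library): a subclass of
# DatabaseObject declares its fields as StaticAttribute entries; the attribute
# names below ("title", "main_artist_collection") are assumptions made for the
# sake of the example, not names defined in this module.
#
#   class _ExampleSong(DatabaseObject):
#       STATIC_ATTRIBUTES = [
#           StaticAttribute(name="title", default_value="", weight=1.0),
#           StaticAttribute(name="main_artist_collection", is_collection=True, is_upwards_collection=True),
#       ]
#
# DatabaseObject.__init__ then wraps each StaticAttribute in an Attribute
# instance and sorts it into the simple/collection attribute lists.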


class Attribute(Generic[P]):
    def __init__(self, database_object: "DatabaseObject", static_attribute: StaticAttribute) -> None:
        self.database_object: DatabaseObject = database_object
        self.static_attribute: StaticAttribute = static_attribute

    @property
    def name(self) -> str:
        return self.static_attribute.name

    def get(self) -> P:
        return self.database_object.__getattribute__(self.name)

    def set(self, value: P):
        self.database_object.__setattr__(self.name, value)
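
# Illustrative sketch: an Attribute is a thin proxy that reads and writes the
# attribute of the same name on its DatabaseObject. Assuming an object with a
# "title" attribute (a hypothetical name used only for this example):
#
#   attribute = Attribute(song, StaticAttribute(name="title", default_value=""))
#   attribute.set("Some Title")
#   assert attribute.get() == song.title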


class DatabaseObject(metaclass=MetaClass):
    COLLECTION_STRING_ATTRIBUTES: tuple = tuple()
    SIMPLE_STRING_ATTRIBUTES: dict = dict()

    # contains all collection attributes, which describe something "smaller",
    # e.g. an album has songs, but not artists.
    DOWNWARDS_COLLECTION_STRING_ATTRIBUTES: tuple = tuple()
    UPWARDS_COLLECTION_STRING_ATTRIBUTES: tuple = tuple()

    STATIC_ATTRIBUTES: List[StaticAttribute] = list()

    def __init__(self, _id: Optional[int] = None, dynamic: bool = False, **kwargs) -> None:
        self.automatic_id: bool = False

        if _id is None and not dynamic:
            """
            Generates a random integer id.
            The id is a 64 bit integer, as defined by ID_BITS in shared.py;
            the exact range is defined in the tuple ID_RANGE.
            """
            _id = random.randint(0, HIGHEST_ID)
            self.automatic_id = True
            # LOGGER.debug(f"Id for {type(self).__name__} isn't set. Setting to {_id}")

        self._attributes: List[Attribute] = []
        self._simple_attribute_list: List[Attribute] = []
        self._collection_attributes: List[Attribute] = []
        self._downwards_collection_attributes: List[Attribute] = []
        self._upwards_collection_attributes: List[Attribute] = []

        for static_attribute in self.STATIC_ATTRIBUTES:
            attribute: Attribute = Attribute(self, static_attribute)
            self._attributes.append(attribute)

            if static_attribute.is_collection:
                self._collection_attributes.append(attribute)

                if static_attribute.is_upwards_collection:
                    self._upwards_collection_attributes.append(attribute)
                if static_attribute.is_downwards_collection:
                    self._downwards_collection_attributes.append(attribute)
            else:
                self._simple_attribute_list.append(attribute)

        # The id can only be None if the object is dynamic (self.dynamic = True)
        self.id: Optional[int] = _id

        self.dynamic = dynamic
        self.build_version = -1

    @property
    def upwards_collection(self) -> Collection:
        for attribute in self._upwards_collection_attributes:
            yield attribute.get()

    @property
    def downwards_collection(self) -> Collection:
        for attribute in self._downwards_collection_attributes:
            yield attribute.get()

    @property
    def all_collections(self) -> Collection:
        for attribute in self._collection_attributes:
            yield attribute.get()
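
    # Note: these properties are generator functions, so accessing e.g.
    # `obj.all_collections` yields the wrapped collection objects lazily.
    # Hedged usage sketch (which collections exist depends on the subclass):
    #
    #   for collection in database_object.all_collections:
    #       print(collection)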

    def __hash__(self):
        if self.dynamic:
            raise TypeError("Dynamic DatabaseObjects are unhashable.")
        return self.id

    def __eq__(self, other) -> bool:
        if not isinstance(other, type(self)):
            return False

        # check for dynamic, so that no exception is thrown
        if not self.dynamic and not other.dynamic and self.id == other.id:
            return True

        temp_attribute_map: Dict[str, set] = defaultdict(set)

        # build a map of this object's indexing values, grouped by name
        for name, value in self.indexing_values:
            temp_attribute_map[name].add(value)

        # check the other object's indexing values against that map
        for name, other_value in other.indexing_values:
            if other_value in temp_attribute_map[name]:
                return True

        return False

    @property
    def indexing_values(self) -> List[Tuple[str, object]]:
        """
        Returns a list of (name, value) pairs for the attributes of this object.
        This helps in comparing instances for equal data (e.g. being the same song, but with different attributes).

        Returns:
            List[Tuple[str, object]]: the first element of each tuple is the name of the attribute, the second its value.
        """

        return list()
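
    # Hedged sketch of how a subclass might override indexing_values; the
    # attribute names ("title", "barcode") are hypothetical and only serve to
    # illustrate the (name, value) pairs that __eq__ compares:
    #
    #   @property
    #   def indexing_values(self) -> List[Tuple[str, object]]:
    #       return [
    #           ("title", self.title),
    #           ("barcode", self.barcode),
    #       ]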

    def merge(self, other, override: bool = False, replace_all_refs: bool = False):
        if other is None:
            return

        if self.id == other.id:
            return

        if not isinstance(other, type(self)):
            LOGGER.warning(f"can't merge \"{type(other)}\" into \"{type(self)}\"")
            return

        for collection in self._collection_attributes:
            if hasattr(self, collection.name) and hasattr(other, collection.name):
                if collection.get() is not getattr(other, collection.name):
                    collection.get().extend(getattr(other, collection.name))

        for simple_attribute, default_value in type(self).SIMPLE_STRING_ATTRIBUTES.items():
            if getattr(other, simple_attribute) == default_value:
                continue

            if override or getattr(self, simple_attribute) == default_value:
                setattr(self, simple_attribute, getattr(other, simple_attribute))

        if replace_all_refs:
            self._risky_merge(other)
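
    # Hedged usage sketch: merging a freshly scraped object into an existing
    # one of the same type. "existing_song" / "scraped_song" are hypothetical
    # instances of a DatabaseObject subclass, not names from this module:
    #
    #   existing_song.merge(scraped_song)                 # fill empty simple attributes, extend collections
    #   existing_song.merge(scraped_song, override=True)  # prefer the other object's simple attributes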

    def strip_details(self):
        for collection in type(self).DOWNWARDS_COLLECTION_STRING_ATTRIBUTES:
            getattr(self, collection).clear()

    @property
    def metadata(self) -> Metadata:
        return Metadata()

    @property
    def options(self) -> List["DatabaseObject"]:
        return [self]

    @property
    def option_string(self) -> str:
        return self.__repr__()

    def _build_recursive_structures(self, build_version: int, merge: bool):
        pass

    def compile(self, merge_into: bool = False):
        """
        Compiles the recursive structures and, depending on the object, does some additional work.

        There is no need to override this if only the recursive structure should be built;
        override self._build_recursive_structures() instead.
        """

        self._build_recursive_structures(build_version=random.randint(0, 99999), merge=merge_into)

    def _add_other_db_objects(self, object_type: Type["DatabaseObject"], object_list: List["DatabaseObject"]):
        pass

    def add_list_of_other_objects(self, object_list: List["DatabaseObject"]):
        d: Dict[Type[DatabaseObject], List[DatabaseObject]] = defaultdict(list)

        for db_object in object_list:
            d[type(db_object)].append(db_object)

        for key, value in d.items():
            self._add_other_db_objects(key, value)
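
    # Hedged sketch: add_list_of_other_objects groups a mixed list of objects
    # by their concrete type and hands each group to _add_other_db_objects,
    # which subclasses override. "some_artist" / "some_album" are hypothetical
    # instances of DatabaseObject subclasses:
    #
    #   song.add_list_of_other_objects([some_artist, some_album])
    #   # results in two calls on song:
    #   #   _add_other_db_objects(Artist, [some_artist])
    #   #   _add_other_db_objects(Album, [some_album])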


class MainObject(DatabaseObject):
    """
    This is the parent class for all "main" data objects:
    - Song
    - Album
    - Artist
    - Label

    It has all the functionality of DatabaseObject (from which it inherits),
    plus some additional functions.
    """

    def __init__(self, _id: Optional[int] = None, dynamic: bool = False, **kwargs):
        DatabaseObject.__init__(self, _id=_id, dynamic=dynamic, **kwargs)

        self.additional_arguments: dict = kwargs