feat: marginally improved hacking

parent b34e9be52a
commit 0ec1a162be
@@ -104,7 +104,7 @@ for _id, _object in objects_by_id.items():
 
 print(only_smile)
 
-
+"""
 c = Collection([Song(title="hi"), Song(title="hi2"), Song(title="hi3")])
 c1 = Collection([Song(title="he"), Song(title="hi5")])
 c11 = Collection([Song(title="wow how ultra subby", isrc="hiii")])
@@ -153,3 +153,4 @@ print("b: ", b)
 
 print(c.data)
 print(c._data)
+"""
@@ -7,6 +7,9 @@ from .utils.config import logging_settings, main_settings, read_config
 read_config()
 from . import cli
 
+if True:
+    import sys
+    sys.setrecursionlimit(100)
 
 # configure logger default
 logging.basicConfig(
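The lowered recursion limit reads like a debugging aid: with a cap of 100 frames, accidental unbounded recursion (the kind the from_map changes below guard against) fails immediately instead of crawling through the default limit of 1000 frames. A minimal sketch of the effect, illustrative only and not from the repository:

import sys

sys.setrecursionlimit(100)


def ping(n: int = 0) -> int:
    # deliberately unbounded, standing in for two objects appending each other
    return ping(n + 1)


try:
    ping()
except RecursionError as exc:
    print("caught early:", exc)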
@@ -23,6 +23,7 @@ class Collection(Generic[T], metaclass=MetaClass):
             contain_attribute_in_given: Dict[str, "Collection"] = None,
             append_object_to_attribute: Dict[str, DatabaseObject] = None
     ) -> None:
+        self._contains_ids = set()
         self._data = []
         self.upper_collections: List[Collection[T]] = []
         self.contained_collections: List[Collection[T]] = []
@@ -42,7 +43,9 @@ class Collection(Generic[T], metaclass=MetaClass):
 
         self.extend(data)
 
-    def _map_element(self, __object: T):
+    def _map_element(self, __object: T, from_map: bool = False):
+        self._contains_ids.add(__object.id)
+
         for name, value in __object.indexing_values:
             if value is None:
                 continue
@@ -50,7 +53,19 @@ class Collection(Generic[T], metaclass=MetaClass):
             self._indexed_values[name].add(value)
             self._indexed_to_objects[value].append(__object)
+
+        if not from_map:
+            for attribute, new_object in self.contain_given_in_attribute.items():
+                __object.__getattribute__(attribute).contain_collection_inside(new_object)
+
+            for attribute, new_object in self.contain_given_in_attribute.items():
+                new_object.contain_collection_inside(__object.__getattribute__(attribute))
+
+            for attribute, new_object in self.append_object_to_attribute.items():
+                __object.__getattribute__(attribute).append(new_object, from_map = True)
 
     def _unmap_element(self, __object: T):
+        self._contains_ids.remove(__object.id)
+
         for name, value in __object.indexing_values:
             if value is None:
                 continue
@@ -118,13 +133,15 @@ class Collection(Generic[T], metaclass=MetaClass):
 
         return results
 
-    def _merge_in_self(self, __object: T):
+    def _merge_in_self(self, __object: T, from_map: bool = False):
         """
         1. find existing objects
         2. merge into existing object
         3. remap existing object
         """
+        if __object.id in self._contains_ids:
+            return
 
         existing_object: DatabaseObject = None
 
         for name, value in __object.indexing_values:
@@ -132,7 +149,10 @@ class Collection(Generic[T], metaclass=MetaClass):
                 continue
             if value in self._indexed_values[name]:
                 existing_object = self._indexed_to_objects[value][0]
-                break
+                if existing_object == __object:
+                    return None
+                else:
+                    break
 
         if existing_object is None:
             return None
@@ -143,34 +163,39 @@ class Collection(Generic[T], metaclass=MetaClass):
         if existing_object.id != __object.id:
             raise ValueError("This should NEVER happen. Merging doesn't work.")
 
-        self._map_element(existing_object)
+        self._map_element(existing_object, from_map = from_map)
 
     def contains(self, __object: T) -> bool:
         return len(self._contained_in_sub(__object)) > 0
 
-    def _append(self, __object: T):
-        self._map_element(__object)
+    def _append(self, __object: T, from_map: bool = False):
+        for attribute, to_sync_with in self.sync_on_append.items():
+            to_sync_with.sync_with_other_collection(__object.__getattribute__(attribute))
+
+        self._map_element(__object, from_map=from_map)
         self._data.append(__object)
 
-    def append(self, __object: Optional[T], already_is_parent: bool = False):
+    def append(self, __object: Optional[T], already_is_parent: bool = False, from_map: bool = False):
         if __object is None:
             return
+        if __object.id in self._contains_ids:
+            return
 
         exists_in_collection = self._contained_in_sub(__object)
         if len(exists_in_collection) and self is exists_in_collection[0]:
             # assuming that the object already is contained in the correct collections
             if not already_is_parent:
-                self._merge_in_self(__object)
+                self._merge_in_self(__object, from_map = from_map)
             return
 
         if not len(exists_in_collection):
-            self._append(__object)
+            self._append(__object, from_map=from_map)
         else:
-            exists_in_collection[0]._merge_in_self(__object)
+            exists_in_collection[0]._merge_in_self(__object, from_map = from_map)
 
         if not already_is_parent or not self._is_root:
             for parent_collection in self._get_parents_of_multiple_contained_children(__object):
-                parent_collection.append(__object, already_is_parent=True)
+                parent_collection.append(__object, already_is_parent=True, from_map=from_map)
 
     def extend(self, __iterable: Optional[Iterable[T]]):
         if __iterable is None:
@@ -202,7 +227,7 @@ class Collection(Generic[T], metaclass=MetaClass):
 
         # now the ugly part
         # replace all refs of the other element with this one
-        self.merge(equal_collection)
+        self._risky_merge(equal_collection)
 
 
     def contain_collection_inside(self, sub_collection: "Collection"):
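For context, a minimal sketch of the mutual-append loop that the new from_map flag and _contains_ids set cut short; Box and its fields are invented for illustration, only the guard pattern mirrors the diff:

from typing import List


class Box:
    def __init__(self, name: str):
        self.name = name
        self.items: List["Box"] = []
        self._contains_ids = set()      # same idea as Collection._contains_ids

    def append(self, other: "Box", from_map: bool = False):
        if id(other) in self._contains_ids:
            return                      # already mapped, nothing to do
        self._contains_ids.add(id(other))
        self.items.append(other)

        # mirror the append on the other side exactly once; from_map marks the
        # mirrored call so it does not trigger yet another mirror of itself
        if not from_map:
            other.append(self, from_map=True)


a, b = Box("a"), Box("b")
a.append(b)
print([x.name for x in a.items])        # ['b']
print([x.name for x in b.items])        # ['a']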
@@ -153,7 +153,7 @@ class DatabaseObject(metaclass=MetaClass):
         if other is None:
             return
 
-        if self is other:
+        if self.id == other.id:
             return
 
         if not isinstance(other, type(self)):
@@ -173,7 +173,7 @@ class DatabaseObject(metaclass=MetaClass):
             setattr(self, simple_attribute, getattr(other, simple_attribute))
 
         if replace_all_refs:
-            super().merge(other)
+            self._risky_merge(other)
 
     def strip_details(self):
         for collection in type(self).DOWNWARDS_COLLECTION_STRING_ATTRIBUTES:
|
@ -9,21 +9,23 @@ class Lake:
|
|||||||
self.id_to_object: Dict[int, object] = {}
|
self.id_to_object: Dict[int, object] = {}
|
||||||
|
|
||||||
def get_real_object(self, db_object: object) -> object:
|
def get_real_object(self, db_object: object) -> object:
|
||||||
def _get_real_id(_id: int) -> int:
|
_id = id(db_object)
|
||||||
return self.redirects.get(_id, _id)
|
while _id in self.redirects:
|
||||||
|
_id = self.redirects[_id]
|
||||||
|
|
||||||
_id = _get_real_id(id(db_object))
|
try:
|
||||||
if _id not in self.id_to_object:
|
return self.id_to_object[_id]
|
||||||
|
except KeyError:
|
||||||
self.add(db_object)
|
self.add(db_object)
|
||||||
|
return db_object
|
||||||
return self.id_to_object[_id]
|
|
||||||
|
|
||||||
def add(self, db_object: object):
|
def add(self, db_object: object):
|
||||||
self.id_to_object[id(db_object)] = db_object
|
self.id_to_object[id(db_object)] = db_object
|
||||||
|
|
||||||
def override(self, to_override: object, new_db_object: object):
|
def override(self, to_override: object, new_db_object: object):
|
||||||
self.redirects[id(to_override)] = id(new_db_object)
|
self.redirects[id(to_override)] = id(new_db_object)
|
||||||
del self.id_to_object[id(to_override)]
|
if id(to_override) in self.id_to_object:
|
||||||
|
del self.id_to_object[id(to_override)]
|
||||||
|
|
||||||
|
|
||||||
lake = Lake()
|
lake = Lake()
|
||||||
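A small standalone sketch of the redirect-chain idea (module-level names here are simplifications of the Lake class above): after two overrides, a single dictionary lookup would stop at a stale id, while the new while-loop walks the chain to the end.

redirects = {}
id_to_object = {}


def get(obj):
    _id = id(obj)
    while _id in redirects:             # follow chained overrides to the final id
        _id = redirects[_id]
    try:
        return id_to_object[_id]
    except KeyError:                    # never registered: fall back to the object itself
        id_to_object[id(obj)] = obj
        return obj


a, b, c = object(), object(), object()
for o in (a, b, c):
    id_to_object[id(o)] = o

redirects[id(a)] = id(b)                # a was merged into b ...
redirects[id(b)] = id(c)                # ... and b was later merged into c

assert get(a) is c                      # a plain .get() would have stopped at b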
@@ -32,17 +34,13 @@ lake = Lake()
 def wrapper(method):
     @wraps(method)
     def wrapped(*args, **kwargs):
-        if len(args) >= 0 and method.__name__ != "__init__":
-            _self = lake.get_real_object(args[0])
-            args = (_self, *args[1:])
-
-        return method(*args, **kwargs)
+        return method(*(lake.get_real_object(args[0]), *args[1:]), **kwargs)
     return wrapped
 
 
 
 class BaseClass:
-    def merge(self, to_replace):
+    def _risky_merge(self, to_replace):
         lake.override(to_replace, self)
 
 
@@ -57,7 +55,7 @@ class MetaClass(type):
             newClassDict[attributeName] = attribute
 
         for key, value in object.__dict__.items( ):
-            if hasattr( value, '__call__' ) and value not in newClassDict and key not in ("__new__", "__repr__", "__init__"):
+            if hasattr( value, '__call__' ) and value not in newClassDict and key not in ("__new__", "__init__"):
                 newClassDict[key] = wrapper(value)
 
         new_instance = type.__new__(meta, classname, bases, newClassDict)
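Finally, a compact sketch of the wrapper-plus-metaclass pattern in miniature (simplified: the real MetaClass also wraps methods inherited from object): every call routes self through the registry first, so code holding a stale reference after a merge still reaches the surviving object.

from functools import wraps

real_object = {}                        # stand-in for the Lake: id -> current object


def redirecting(method):
    @wraps(method)
    def wrapped(*args, **kwargs):
        # swap the first positional argument for whatever the registry says is current
        _self = real_object.get(id(args[0]), args[0])
        return method(_self, *args[1:], **kwargs)
    return wrapped


class Meta(type):
    def __new__(meta, name, bases, namespace):
        for key, value in list(namespace.items()):
            if callable(value) and key not in ("__new__", "__init__"):
                namespace[key] = redirecting(value)
        return super().__new__(meta, name, bases, namespace)


class Song(metaclass=Meta):
    def __init__(self, title):
        self.title = title

    def describe(self):
        return self.title


old, new = Song("draft"), Song("final")
real_object[id(old)] = new              # "merge": calls on old are routed to new
print(old.describe())                   # final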