from __future__ import annotations

from collections import defaultdict
from typing import TypeVar, Generic, Dict, Optional, Iterable, List, Iterator, Tuple, Generator, Union, Any, Set

from .parents import OuterProxy
from ..utils import object_trace
from ..utils import output, BColors

T = TypeVar('T', bound=OuterProxy)


class Collection(Generic[T]):
    __is_collection__ = True

    _data: List[T]

    _indexed_from_id: Dict[int, Dict[str, Any]]
    _indexed_values: Dict[str, Dict[Any, T]]
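
    # Illustrative sketch of the index layout: after mapping an object with id 1 whose
    # indexing_values yield ("title", "Some Song"), the two indexes roughly hold
    #   _indexed_from_id == {1: {"id": 1, "title": "Some Song"}}
    #   _indexed_values  == {"id": {1: <obj>}, "title": {"Some Song": <obj>}}
    # _indexed_from_id remembers which keys an object occupies so it can be unmapped
    # cleanly; _indexed_values is the reverse lookup used by _find_object.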

    shallow_list = property(fget=lambda self: self.data)
    def __init__(
            self,
            data: Optional[Iterable[T]] = None,
            sync_on_append: Optional[Dict[str, Collection]] = None,
            append_object_to_attribute: Optional[Dict[str, T]] = None,
            extend_object_to_attribute: Optional[Dict[str, Collection]] = None,
    ) -> None:
        self._collection_for: dict = dict()

        self._contains_ids = set()
        self._data = []

        # Collection attributes of the appended element that should be modified on append
        # Key: collection attribute (str) of the appended element
        # Value: main collection to sync to
        self.append_object_to_attribute: Dict[str, T] = append_object_to_attribute or {}
        self.extend_object_to_attribute: Dict[str, Collection[T]] = extend_object_to_attribute or {}
        self.sync_on_append: Dict[str, Collection] = sync_on_append or {}
        self.pull_from: List[Collection] = []
        self.push_to: List[Collection] = []

        # This is to cleanly unmap previously mapped items by their id
        self._indexed_from_id: Dict[int, Dict[str, Any]] = defaultdict(dict)
        # This is to keep track of and look up the actual objects
        self._indexed_values: Dict[str, Dict[Any, T]] = defaultdict(dict)

        self.extend(data)
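
    # Hypothetical wiring sketch (attribute names like `song_collection` and
    # `album_collection` are placeholders, not taken from this module): a parent object
    # could create a child collection that keeps back-references in sync, e.g.
    #   self.song_collection = Collection(
    #       data=[],
    #       append_object_to_attribute={"album_collection": self},
    #   )
    # Every object appended to that collection then gets `self` appended to its own
    # `album_collection` via the hooks applied in _append_new_object.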

    def __repr__(self) -> str:
        return f"Collection({' | '.join(self._collection_for.values())} {id(self)})"

    def _map_element(self, __object: T, no_unmap: bool = False, **kwargs):
        if not no_unmap:
            self._unmap_element(__object.id)

        self._indexed_from_id[__object.id]["id"] = __object.id
        self._indexed_values["id"][__object.id] = __object

        for name, value in __object.indexing_values:
            if value is None or value == __object._inner._default_values.get(name):
                continue

            self._indexed_values[name][value] = __object
            self._indexed_from_id[__object.id][name] = value

    def _unmap_element(self, __object: Union[T, int]):
        obj_id = __object.id if isinstance(__object, OuterProxy) else __object

        if obj_id not in self._indexed_from_id:
            return

        for name, value in self._indexed_from_id[obj_id].items():
            if value in self._indexed_values[name]:
                del self._indexed_values[name][value]

        del self._indexed_from_id[obj_id]

    def _remap(self):
        # reinitialize the mapping to clean it without time consuming operations
        self._indexed_from_id: Dict[int, Dict[str, Any]] = defaultdict(dict)
        self._indexed_values: Dict[str, Dict[Any, T]] = defaultdict(dict)

        for e in self._data:
            self._map_element(e, no_unmap=True)
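
    # Note: _find_object() rebuilds the indexes via _remap() before looking anything up,
    # since merges can change an object's indexing_values after it was first mapped; a
    # full rebuild keeps the lookup tables consistent with the current contents of _data.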

    def _find_object(self, __object: T, **kwargs) -> Optional[T]:
        self._remap()

        if __object.id in self._indexed_from_id:
            return self._indexed_values["id"][__object.id]

        for name, value in __object.indexing_values:
            if value in self._indexed_values[name]:
                return self._indexed_values[name][value]

        return None

    def _append_new_object(self, other: T, **kwargs):
        """
        This function appends the other object to the current collection.
        This only works if no other object that represents the same real-life entity
        already exists in the collection.
        """

        self._data.append(other)

        # apply all of the existing hooks so the appended object ends up in the defined data structure
        for collection_attribute, generator in self.extend_object_to_attribute.items():
            other.__getattribute__(collection_attribute).extend(generator, **kwargs)

        for attribute, new_object in self.append_object_to_attribute.items():
            other.__getattribute__(attribute).append(new_object, **kwargs)

        for attribute, a in self.sync_on_append.items():
            # syncing two collections by reference
            b = other.__getattribute__(attribute)
            if a is b:
                continue

            object_trace(f"Syncing [{a}] = [{b}]")

            b_data = b.data.copy()
            b_collection_for = b._collection_for.copy()

            del b

            for synced_with, key in b_collection_for.items():
                synced_with.__setattr__(key, a)
                a._collection_for[synced_with] = key

            a.extend(b_data, **kwargs)
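
    # Note on sync_on_append: after the loop above, collection `b` is effectively
    # absorbed into `a` — every owner that previously pointed at `b` (recorded in
    # b._collection_for) is re-pointed at `a` via __setattr__, and b's elements are
    # re-appended to `a`, so both sides end up sharing one Collection instance.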

    def append(self, other: Optional[T], **kwargs):
        """
        If an object that represents the same entity already exists in a relevant collection,
        merge into the existing object (and remap).
        Otherwise append to this collection.

        :param other:
        :return:
        """

        if other is None:
            return
        if other.id in self._indexed_from_id:
            return

        object_trace(f"Appending {other.option_string} to {self}")

        for c in self.pull_from:
            r = c._find_object(other)
            if r is not None:
                output("found pull from", r, other, self, color=BColors.RED, sep="\t")
                other.merge(r, **kwargs)
                c.remove(r, existing=r, **kwargs)
                break

        existing_object = self._find_object(other)

        # switching collection in the case of push_to
        for c in self.push_to:
            r = c._find_object(other)
            if r is not None:
                output("found push to", r, other, self, color=BColors.RED, sep="\t")
                return c.append(other, **kwargs)

        if existing_object is None:
            self._append_new_object(other, **kwargs)
        else:
            existing_object.merge(other, **kwargs)
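
    # Illustrative routing sketch, assuming two collections wired as
    # `main.push_to = [other]` and `other.pull_from = [main]`:
    #   main.append(x)   # if `other` already indexes an equivalent of x, the append is
    #                    #   delegated to other.append(x)
    #   other.append(y)  # if `main` already indexes an equivalent of y, that match is
    #                    #   merged into y and removed from `main`; y then lands in `other`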

    def remove(self, *other_list: T, silent: bool = False, existing: Optional[T] = None, **kwargs):
        for other in other_list:
            existing: Optional[T] = existing or self._indexed_values["id"].get(other.id, None)
            if existing is None:
                if not silent:
                    raise ValueError(f"Object {other} not found in {self}")
                return other

            """
            for collection_attribute, generator in self.extend_object_to_attribute.items():
                other.__getattribute__(collection_attribute).remove(*generator, silent=silent, **kwargs)

            for attribute, new_object in self.append_object_to_attribute.items():
                other.__getattribute__(attribute).remove(new_object, silent=silent, **kwargs)
            """

            self._data.remove(existing)
            self._unmap_element(existing)
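
    # Note: the `existing` keyword lets append() pass an already-resolved match
    # (see the pull_from loop above), so the id lookup can be skipped for that object.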

    def contains(self, __object: T) -> bool:
        return self._find_object(__object) is not None

    def extend(self, other_collections: Optional[Iterable[T]], **kwargs):
        if other_collections is None:
            return

        for other_object in other_collections:
            self.append(other_object, **kwargs)

    @property
    def data(self) -> List[T]:
        return list(self.__iter__())

    def __len__(self) -> int:
        return len(self._data)

    @property
    def empty(self) -> bool:
        return self.__len__() <= 0

    def __iter__(self) -> Iterator[T]:
        yield from self._data

    def __merge__(self, other: Collection, **kwargs):
        object_trace(f"merging {str(self)} | {str(other)}")
        self.extend(other, **kwargs)

    def __getitem__(self, item: int):
        return self._data[item]

    def get(self, item: int, default=None):
        if item >= len(self._data):
            return default
        return self._data[item]

    def __eq__(self, other: Collection) -> bool:
        if self.empty and other.empty:
            return True

        return self._data == other._data
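

# Minimal usage sketch (assumes a concrete OuterProxy subclass, here called `Song`,
# that exposes `id` and `indexing_values`):
#
#   songs: Collection[Song] = Collection()
#   songs.append(song_a)        # appended and indexed
#   songs.append(song_a_copy)   # shares an indexing value -> merged into song_a instead
#   assert songs.contains(song_a) and len(songs) == 1
#   songs.remove(song_a)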