fix: if two proxies refer to multiple objects, merging them desynchronizes the shared data, causing a recursion depth error

This commit is contained in:
2024-04-15 13:56:40 +02:00
parent 5284c1f55c
commit a5ede2a6ad
3 changed files with 21 additions and 14 deletions

View File

@@ -26,9 +26,11 @@ class InnerData:
If the data in the wrapper class has to be merged, then this class is just replaced and garbage collected.
"""
_multiple_instances = False
_refers_to_instances: set = None
def __init__(self, object_type, **kwargs):
self._refers_to_instances = set()
# initialize the default values
self.__default_values = {}
for name, factory in object_type._default_factories.items():
@@ -176,23 +178,28 @@ class OuterProxy:
:return:
"""
if __other is None:
_ = "debug"
return
a = self
b = __other
if a._inner._multiple_instances and b._inner._multiple_instances:
LOGGER.warning(f"Both instances data obj are shared over multiple objects. This will lead so them being unsynchronized at some point. {a} {b}")
if a._inner is b._inner:
return
if b._inner._multiple_instances:
# switch instances if more efficient
if len(b._inner._refers_to_instances) > len(a._inner._refers_to_instances):
a, b = b, a
a._inner.__merge__(b._inner, override=override)
if len(b._inner._refers_to_instances) > 1:
for instance in b._inner._refers_to_instances:
instance._inner = a._inner
b._inner = a._inner
b._inner._multiple_instances = True
b._inner._refers_to_instances.add(a)
b._inner._refers_to_instances.add(b)
def mark_as_fetched(self, *url_hash_list: List[str]):
for url_hash in url_hash_list: