Merge pull request #1002 from olivierkes/duplicate_ids

Potential fix for #719, #1001: data loss when reordering items in upward direction
This commit is contained in:
Tobias Frisch 2022-05-28 12:03:58 +02:00 committed by GitHub
commit e780963891
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
2 changed files with 21 additions and 2 deletions

View file

@@ -224,15 +224,23 @@ class abstractItem():
if max([self.IDs.count(i) for i in self.IDs if i]) != 1:
LOGGER.warning("There are some items with overlapping IDs: %s", [i for i in self.IDs if i and self.IDs.count(i) != 1])
_IDs = [self.ID()]
def checkChildren(item):
"Check recursively every children and give them unique, non-empty, non-zero IDs."
for c in item.children():
_id = c.ID()
if not _id or _id == "0":
if not _id or _id == "0" or _id in _IDs:
c.getUniqueID()
LOGGER.warning("* Item {} '{}' is given new unique ID: '{}'".format(_id, c.title(), c.ID()))
_IDs.append(_id)
checkChildren(c)
checkChildren(self)
# Not sure if self.IDs is still useful (it was used in the old unique ID generating system at least).
# It might be deleted everywhere. But just in the meantime, it should at least be up to date.
self.IDs = self.listAllIDs()
def listAllIDs(self):
IDs = [self.ID()]
for c in self.children():

View file

@@ -438,7 +438,18 @@ class abstractModel(QAbstractItemModel):
for item in items:
if item.ID() in IDs:
item.getUniqueID(recursive=True)
# Items don't get new IDs, because they are not part of a model yet,
# so the following call does nothing:
# item.getUniqueID(recursive=True)
# Instead we need to remove IDs (recursively) in all copied items, so that they
# will receive new ones when inserted within the model.
def removeIDs(i):
i.setData(item.enum.ID, None)
for c in i.children():
removeIDs(c)
removeIDs(item)
r = self.insertItems(items, beginRow, parent)