diff --git a/src/main/java/caosdb/server/jobs/Job.java b/src/main/java/caosdb/server/jobs/Job.java
index 8d98cbc30f382bc18473a6a8aabf2017c9fec55e..78c8ed61945a2d29772455ebeae7b8323a1c8140 100644
--- a/src/main/java/caosdb/server/jobs/Job.java
+++ b/src/main/java/caosdb/server/jobs/Job.java
@@ -186,7 +186,7 @@ public abstract class Job extends AbstractObservable implements Observer {
       if (ret != null) {
         return ret;
       }
-    } else if (version.startsWith("HEAD~")) {
+    } else if (version.startsWith("HEAD~") && getEntityById(id) != null) {
       // if version is HEAD~{OFFSET} with {OFFSET} > 0 and the targeted entity is
       // part of this request (i.e. is to be updated), the actual offset has to be
       // reduced by 1. HEAD always denotes the entity@HEAD *after* the successful
diff --git a/src/main/java/caosdb/server/jobs/core/RemoveDuplicates.java b/src/main/java/caosdb/server/jobs/core/RemoveDuplicates.java
index a104c124293cf07ecc3b871c4bc6b31470ec2a15..a09f66533cb3bc3c7cc01b2d6931bc120f87f7a7 100644
--- a/src/main/java/caosdb/server/jobs/core/RemoveDuplicates.java
+++ b/src/main/java/caosdb/server/jobs/core/RemoveDuplicates.java
@@ -30,17 +30,20 @@ public class RemoveDuplicates extends ContainerJob {
 
   @Override
   protected void run() {
-    final HashSet<EntityInterface> rm = new HashSet<EntityInterface>();
+    // collect duplicates
+    final HashSet<EntityInterface> duplicates = new HashSet<EntityInterface>();
     for (final EntityInterface e : getContainer()) {
-      if (e.hasId() && !rm.contains(e)) {
+      if (e.hasId() && !duplicates.contains(e)) {
         for (final EntityInterface e2 : getContainer()) {
-          if (e2 != e && e.getId().equals(e2.getId())) {
-            rm.add(e2);
+          if (e2 != e && e.getIdVersion().equals(e2.getIdVersion())) {
+            // this is a duplicate of another entity in this container
+            duplicates.add(e2);
           }
         }
       }
     }
-    for (final EntityInterface e : rm) {
+    // remove duplicates.
+    for (final EntityInterface e : duplicates) {
       getContainer().remove(e);
     }
   }
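
For context on the Job.java hunk: per the existing comment, HEAD denotes the entity's state *after* the request succeeds, so for an entity that is being updated in the same request a HEAD~{OFFSET} reference has to be shifted by one step against the stored history; the added getEntityById(id) != null guard restricts that shift to entities that are actually part of the request. A minimal sketch of the offset arithmetic, assuming a HEAD~{OFFSET} string (the method name resolveOffset and the targetIsInRequest flag are hypothetical illustrations, not part of the patch):

  static int resolveOffset(final String version, final boolean targetIsInRequest) {
    // version is expected to look like "HEAD~3"
    final int offset = Integer.parseInt(version.substring("HEAD~".length()));
    // HEAD names the state *after* this request, so for an entity that is being
    // updated in the same request the persisted history is one step behind the
    // requested offset.
    return (targetIsInRequest && offset > 0) ? offset - 1 : offset;
  }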
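
The RemoveDuplicates.java hunk changes the duplicate criterion from id-only to id plus version, so the same entity requested at two different versions is no longer collapsed into one. A self-contained sketch of that behaviour under simplifying assumptions (the Entity class and its getIdVersion() are stand-ins for EntityInterface, and the hasId() check is omitted):

  import java.util.ArrayList;
  import java.util.HashSet;
  import java.util.List;
  import java.util.Set;

  class Entity {
    private final String id;
    private final String version;

    Entity(final String id, final String version) {
      this.id = id;
      this.version = version;
    }

    String getIdVersion() {
      return id + "@" + version;
    }
  }

  class DedupSketch {
    // Keeps the first occurrence of each id@version pair, mirroring the
    // collect-then-remove structure of RemoveDuplicates.run().
    static List<Entity> removeDuplicates(final List<Entity> container) {
      final Set<Entity> duplicates = new HashSet<>();
      for (final Entity e : container) {
        if (!duplicates.contains(e)) {
          for (final Entity e2 : container) {
            if (e2 != e && e.getIdVersion().equals(e2.getIdVersion())) {
              duplicates.add(e2);
            }
          }
        }
      }
      final List<Entity> result = new ArrayList<>(container);
      result.removeAll(duplicates);
      return result;
    }

    public static void main(final String[] args) {
      final List<Entity> container = new ArrayList<>();
      container.add(new Entity("2345", "HEAD"));
      container.add(new Entity("2345", "HEAD~1")); // same id, older version: kept
      container.add(new Entity("2345", "HEAD"));   // exact repetition: removed
      System.out.println(removeDuplicates(container).size()); // prints 2
    }
  }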